1use chrono::{DateTime, NaiveDate, Utc};
5use std::fmt;
6
/// Environment variable that overrides the on-disk data-cache location.
pub const TEMPOCH_DATA_DIR_ENV: &str = "TEMPOCH_DATA_DIR";
/// IERS UTC-TAI offset history (leap seconds and pre-1972 rate offsets).
pub const UTC_TAI_HISTORY_URL: &str = "https://hpiers.obspm.fr/eoppc/bul/bulc/UTC-TAI.history";
/// USNO observed Delta T table (`deltat.data`).
pub const DELTA_T_OBSERVED_URL: &str = "https://maia.usno.navy.mil/ser7/deltat.data";
/// USNO predicted Delta T table (`deltat.preds`).
pub const DELTA_T_PREDICTIONS_URL: &str = "https://maia.usno.navy.mil/ser7/deltat.preds";
/// IERS `finals2000A.all` Earth-orientation-parameter series.
pub const EOP_FINALS_URL: &str = "https://datacenter.iers.org/data/9/finals2000A.all";
/// Flat TAI-UTC value used before the 1961 start of the UTC-TAI table,
/// as the constant's name states — an approximation, not tabulated data.
pub const PRE_1961_TAI_MINUS_UTC_APPROX: f64 = 10.0;
13
/// One piecewise rule from the IERS UTC-TAI history: within its MJD span the
/// offset is `base_seconds + (mjd - reference_mjd) * slope_seconds_per_day`
/// (the formula shape parsed by `extract_base_seconds`/`extract_slope`).
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct UtcTaiSegment {
    /// First MJD on which this rule applies.
    pub start_mjd: i32,
    /// MJD where the next rule takes over (validation requires it to equal
    /// the next segment's start); `None` only for the final, open-ended rule.
    pub end_mjd: Option<i32>,
    /// Constant part of the TAI-UTC offset, in seconds.
    pub base_seconds: f64,
    /// MJD origin of the linear drift term.
    pub reference_mjd: f64,
    /// Linear drift rate; 0.0 when the source row has no rate term.
    pub slope_seconds_per_day: f64,
}
22
/// One daily row extracted from the IERS `finals2000A.all` EOP file.
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct EopPoint {
    /// Modified Julian Day of the row.
    pub mjd: i32,
    /// Polar-motion flag was `I` (observed) rather than `P` (predicted).
    pub pm_observed: bool,
    /// UT1-UTC flag was `I` (observed).
    pub ut1_observed: bool,
    /// Nutation (celestial-pole offset) flag was `I` (observed).
    pub nutation_observed: bool,
    /// Polar motion x component, arcseconds, when the field is populated.
    pub pm_xp_arcsec: Option<f64>,
    /// Polar motion y component, arcseconds, when the field is populated.
    pub pm_yp_arcsec: Option<f64>,
    /// UT1-UTC in seconds; always present — rows without it are skipped.
    pub ut1_minus_utc_seconds: f64,
    /// Excess length of day, milliseconds, when the field is populated.
    pub lod_milliseconds: Option<f64>,
    /// Celestial-pole offset dX, milliarcseconds, when populated.
    pub dx_milliarcsec: Option<f64>,
    /// Celestial-pole offset dY, milliarcseconds, when populated.
    pub dy_milliarcsec: Option<f64>,
}
36
/// Record of when the four source files were fetched and their raw-byte
/// SHA-256 digests, used to integrity-check the on-disk cache.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TimeDataProvenance {
    /// UTC fetch timestamp in `%Y-%m-%dT%H:%M:%S` form (see `fetched_at`).
    fetched_utc: String,
    /// Hex SHA-256 of the raw `UTC-TAI.history` bytes.
    utc_tai_sha256: String,
    /// Hex SHA-256 of the raw `deltat.data` bytes.
    delta_t_observed_sha256: String,
    /// Hex SHA-256 of the raw `deltat.preds` bytes.
    delta_t_predictions_sha256: String,
    /// Hex SHA-256 of the raw `finals2000A.all` bytes.
    eop_finals_sha256: String,
}
45
46impl TimeDataProvenance {
47 pub fn new(
48 fetched_utc: impl Into<String>,
49 utc_tai_sha256: impl Into<String>,
50 delta_t_observed_sha256: impl Into<String>,
51 delta_t_predictions_sha256: impl Into<String>,
52 eop_finals_sha256: impl Into<String>,
53 ) -> Self {
54 Self {
55 fetched_utc: fetched_utc.into(),
56 utc_tai_sha256: utc_tai_sha256.into(),
57 delta_t_observed_sha256: delta_t_observed_sha256.into(),
58 delta_t_predictions_sha256: delta_t_predictions_sha256.into(),
59 eop_finals_sha256: eop_finals_sha256.into(),
60 }
61 }
62
63 pub fn fetched_utc(&self) -> &str {
64 &self.fetched_utc
65 }
66
67 pub fn fetched_at(&self) -> Option<DateTime<Utc>> {
68 chrono::NaiveDateTime::parse_from_str(&self.fetched_utc, "%Y-%m-%dT%H:%M:%S")
69 .ok()
70 .map(|dt| dt.and_utc())
71 }
72
73 pub fn utc_tai_sha256(&self) -> &str {
74 &self.utc_tai_sha256
75 }
76
77 pub fn delta_t_observed_sha256(&self) -> &str {
78 &self.delta_t_observed_sha256
79 }
80
81 pub fn delta_t_predictions_sha256(&self) -> &str {
82 &self.delta_t_predictions_sha256
83 }
84
85 pub fn eop_finals_sha256(&self) -> &str {
86 &self.eop_finals_sha256
87 }
88}
89
/// Fully parsed, validated in-memory view of all four time-data sources.
#[derive(Debug, Clone)]
pub struct TimeDataBundle {
    /// Piecewise TAI-UTC rules, ordered by start MJD.
    utc_tai_segments: Vec<UtcTaiSegment>,
    /// Stitched observed+predicted Delta T series as `(mjd, seconds)` pairs.
    modern_delta_t_points: Vec<(f64, f64)>,
    /// MJD of the last *observed* (non-predicted) Delta T point.
    modern_delta_t_observed_end_mjd: f64,
    /// Daily EOP rows (gap-free, strictly increasing MJD — see validation).
    eop_points: Vec<EopPoint>,
    /// Fetch timestamp and source digests for this bundle.
    provenance: TimeDataProvenance,
}
98
impl TimeDataBundle {
    /// Assembles a bundle from already-parsed components; no validation is
    /// performed here (the parsers validate before construction).
    pub fn new(
        utc_tai_segments: Vec<UtcTaiSegment>,
        modern_delta_t_points: Vec<(f64, f64)>,
        modern_delta_t_observed_end_mjd: f64,
        eop_points: Vec<EopPoint>,
        provenance: TimeDataProvenance,
    ) -> Self {
        Self {
            utc_tai_segments,
            modern_delta_t_points,
            modern_delta_t_observed_end_mjd,
            eop_points,
            provenance,
        }
    }

    /// Piecewise TAI-UTC rules, ordered by start MJD.
    pub fn utc_tai_segments(&self) -> &[UtcTaiSegment] {
        &self.utc_tai_segments
    }

    /// Stitched observed+predicted Delta T series as `(mjd, seconds)` pairs.
    pub fn modern_delta_t_points(&self) -> &[(f64, f64)] {
        &self.modern_delta_t_points
    }

    /// MJD of the last observed (non-predicted) Delta T point.
    pub fn modern_delta_t_observed_end_mjd(&self) -> f64 {
        self.modern_delta_t_observed_end_mjd
    }

    /// Daily Earth-orientation-parameter rows.
    pub fn eop_points(&self) -> &[EopPoint] {
        &self.eop_points
    }

    /// Download provenance (timestamp and source-file digests).
    pub fn provenance(&self) -> &TimeDataProvenance {
        &self.provenance
    }

    /// MJD of the last EOP row whose UT1-UTC value is observed.
    pub fn eop_observed_end_mjd(&self) -> i32 {
        observed_end_mjd(&self.eop_points)
    }

    /// MJD of the last EOP row, or 0 when the series is empty.
    pub fn eop_end_mjd(&self) -> i32 {
        self.eop_points
            .last()
            .map(|point| point.mjd)
            .unwrap_or_default()
    }

    /// Parses the four raw source texts and assembles a validated bundle.
    /// Each parser error is surfaced as `TimeDataError::Parse`.
    #[cfg(feature = "fetch")]
    fn from_raw_sources(
        utc_tai_history: &str,
        delta_t_observed: &str,
        delta_t_predictions: &str,
        eop_finals: &str,
        provenance: TimeDataProvenance,
    ) -> Result<Self, TimeDataError> {
        let utc_tai_segments =
            parse_utc_tai_segments(utc_tai_history).map_err(TimeDataError::Parse)?;
        let observed = parse_delta_t_observed(delta_t_observed).map_err(TimeDataError::Parse)?;
        let predicted =
            parse_delta_t_predictions(delta_t_predictions).map_err(TimeDataError::Parse)?;
        let (modern_delta_t_points, modern_delta_t_observed_end_mjd) =
            build_modern_delta_t_points(&observed, &predicted).map_err(TimeDataError::Parse)?;
        let eop_points = parse_eop_finals(eop_finals).map_err(TimeDataError::Parse)?;
        Ok(Self::new(
            utc_tai_segments,
            modern_delta_t_points,
            modern_delta_t_observed_end_mjd,
            eop_points,
            provenance,
        ))
    }
}
172
/// Errors produced while fetching, caching, verifying, or parsing time data.
#[derive(Debug)]
pub enum TimeDataError {
    /// Filesystem or other I/O failure.
    Io(std::io::Error),
    /// Network download failed or returned an unusable body.
    Download(String),
    /// A source file could not be parsed.
    Parse(String),
    /// Cached data is missing, malformed, or fails its SHA-256 check.
    Integrity(String),
}
180
181impl fmt::Display for TimeDataError {
182 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
183 match self {
184 Self::Io(err) => write!(f, "I/O error: {err}"),
185 Self::Download(msg) => write!(f, "download error: {msg}"),
186 Self::Parse(msg) => write!(f, "parse error: {msg}"),
187 Self::Integrity(msg) => write!(f, "integrity error: {msg}"),
188 }
189 }
190}
191
192impl std::error::Error for TimeDataError {
193 fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
194 match self {
195 Self::Io(err) => Some(err),
196 _ => None,
197 }
198 }
199}
200
201impl From<std::io::Error> for TimeDataError {
202 fn from(value: std::io::Error) -> Self {
203 Self::Io(value)
204 }
205}
206
#[cfg(feature = "fetch")]
mod fetch_support {
    //! Download, cache, verify, and atomically refresh the time-data bundle
    //! on disk. The live bundle lives in `<data_dir>/bundle`; refreshes go
    //! through a uniquely-named staging directory and are swapped in only
    //! after the staged files parse and hash-verify end to end.

    use super::*;
    use serde_json::Value;
    use sha2::{Digest, Sha256};
    use std::fs;
    use std::io::Read;
    use std::path::{Path, PathBuf};
    use std::time::{SystemTime, UNIX_EPOCH};

    /// Cache location under the home directory when no override is set.
    const DEFAULT_SUBDIR: &str = ".tempoch/data";
    /// Subdirectory holding the currently-live, verified bundle.
    const BUNDLE_DIR_NAME: &str = "bundle";
    /// Provenance sidecar stored alongside the cached source files.
    const PROVENANCE_FILE: &str = "time_data.provenance.json";
    const UTC_TAI_HISTORY_FILE: &str = "UTC-TAI.history";
    const DELTA_T_OBSERVED_FILE: &str = "deltat.data";
    const DELTA_T_PREDICTIONS_FILE: &str = "deltat.preds";
    const EOP_FINALS_FILE: &str = "finals2000A.all";
    /// Per-request network timeout.
    const FETCH_TIMEOUT_SECS: u64 = 60;

    /// Manages the on-disk cache of downloaded time-data files.
    pub struct TimeDataManager {
        data_dir: PathBuf,
    }

    impl TimeDataManager {
        /// Creates a manager rooted at the resolved default data directory
        /// (see `resolve_data_dir`), creating it if necessary.
        pub fn new() -> Result<Self, TimeDataError> {
            let data_dir = resolve_data_dir()?;
            fs::create_dir_all(&data_dir)?;
            Ok(Self { data_dir })
        }

        /// Creates a manager rooted at an explicit directory, creating it
        /// if necessary.
        pub fn with_dir(dir: impl Into<PathBuf>) -> Result<Self, TimeDataError> {
            let data_dir = dir.into();
            fs::create_dir_all(&data_dir)?;
            Ok(Self { data_dir })
        }

        /// Root directory holding the bundle and any staging directories.
        pub fn data_dir(&self) -> &Path {
            &self.data_dir
        }

        /// Loads and integrity-checks the live cached bundle.
        pub fn load_cached(&self) -> Result<TimeDataBundle, TimeDataError> {
            load_cached_bundle(self.bundle_dir())
        }

        /// Downloads fresh copies of all four source files into a staging
        /// directory, validates them end to end, then atomically swaps the
        /// staging directory in as the live bundle.
        pub fn refresh(&self) -> Result<(), TimeDataError> {
            fs::create_dir_all(&self.data_dir)?;
            let staging_dir = self.staging_dir();
            if staging_dir.exists() {
                fs::remove_dir_all(&staging_dir)?;
            }
            fs::create_dir_all(&staging_dir)?;

            let fetch_ts = chrono::Utc::now().format("%Y-%m-%dT%H:%M:%S").to_string();
            let utc_tai = fetch_text(UTC_TAI_HISTORY_URL)?;
            let delta_obs = fetch_text(DELTA_T_OBSERVED_URL)?;
            let delta_pred = fetch_text(DELTA_T_PREDICTIONS_URL)?;
            let eop = fetch_text(EOP_FINALS_URL)?;

            fs::write(staging_dir.join(UTC_TAI_HISTORY_FILE), &utc_tai.text)?;
            fs::write(staging_dir.join(DELTA_T_OBSERVED_FILE), &delta_obs.text)?;
            fs::write(staging_dir.join(DELTA_T_PREDICTIONS_FILE), &delta_pred.text)?;
            fs::write(staging_dir.join(EOP_FINALS_FILE), &eop.text)?;

            let provenance = TimeDataProvenance::new(
                fetch_ts,
                utc_tai.sha256,
                delta_obs.sha256,
                delta_pred.sha256,
                eop.sha256,
            );
            fs::write(
                staging_dir.join(PROVENANCE_FILE),
                render_provenance_json(&provenance),
            )?;

            // Parse and hash-verify the staged files before promoting them,
            // so a bad download can never clobber a good live bundle.
            load_cached_bundle(staging_dir.clone())?;
            swap_bundle_dirs(&staging_dir, self.bundle_dir())?;
            Ok(())
        }

        /// Convenience: `refresh` followed by `load_cached`.
        pub fn refresh_and_load(&self) -> Result<TimeDataBundle, TimeDataError> {
            self.refresh()?;
            self.load_cached()
        }

        fn bundle_dir(&self) -> PathBuf {
            self.data_dir.join(BUNDLE_DIR_NAME)
        }

        /// Unique per-call staging path; the PID + nanosecond nonce keeps
        /// concurrent refreshes from colliding on the same directory.
        fn staging_dir(&self) -> PathBuf {
            let nonce = SystemTime::now()
                .duration_since(UNIX_EPOCH)
                .unwrap_or_default()
                .as_nanos();
            self.data_dir.join(format!(
                ".{BUNDLE_DIR_NAME}.staging-{}-{nonce}",
                std::process::id()
            ))
        }
    }

    /// A downloaded body plus the SHA-256 of its raw bytes.
    struct DownloadedText {
        text: String,
        sha256: String,
    }

    /// Picks the data directory: `TEMPOCH_DATA_DIR` when set and non-blank,
    /// otherwise `<home>/.tempoch/data` (`HOME`, then `USERPROFILE`).
    fn resolve_data_dir() -> Result<PathBuf, TimeDataError> {
        if let Ok(dir) = std::env::var(TEMPOCH_DATA_DIR_ENV) {
            let trimmed = dir.trim();
            if !trimmed.is_empty() {
                return Ok(PathBuf::from(trimmed));
            }
        }

        let home = std::env::var("HOME")
            .or_else(|_| std::env::var("USERPROFILE"))
            .map_err(|_| {
                TimeDataError::Io(std::io::Error::new(
                    std::io::ErrorKind::NotFound,
                    "Cannot determine home directory. Set TEMPOCH_DATA_DIR explicitly.",
                ))
            })?;

        Ok(PathBuf::from(home).join(DEFAULT_SUBDIR))
    }

    /// Reads every file of a bundle directory, verifies each against the
    /// provenance digests, then parses the lot into a `TimeDataBundle`.
    fn load_cached_bundle(bundle_dir: PathBuf) -> Result<TimeDataBundle, TimeDataError> {
        if !bundle_dir.exists() {
            return Err(TimeDataError::Integrity(format!(
                "cached bundle not found at {}",
                bundle_dir.display()
            )));
        }

        let utc_tai_history = read_text(bundle_dir.join(UTC_TAI_HISTORY_FILE))?;
        let delta_t_observed = read_text(bundle_dir.join(DELTA_T_OBSERVED_FILE))?;
        let delta_t_predictions = read_text(bundle_dir.join(DELTA_T_PREDICTIONS_FILE))?;
        let eop_finals = read_text(bundle_dir.join(EOP_FINALS_FILE))?;
        let provenance_text = read_text(bundle_dir.join(PROVENANCE_FILE))?;
        let provenance = parse_provenance_json(&provenance_text)?;

        verify_sha256(
            "UTC-TAI history",
            &utc_tai_history,
            provenance.utc_tai_sha256(),
        )?;
        verify_sha256(
            "Delta T observed",
            &delta_t_observed,
            provenance.delta_t_observed_sha256(),
        )?;
        verify_sha256(
            "Delta T predictions",
            &delta_t_predictions,
            provenance.delta_t_predictions_sha256(),
        )?;
        verify_sha256("EOP finals", &eop_finals, provenance.eop_finals_sha256())?;

        TimeDataBundle::from_raw_sources(
            &utc_tai_history,
            &delta_t_observed,
            &delta_t_predictions,
            &eop_finals,
            provenance,
        )
    }

    /// Replaces `live_dir` with `staging_dir`, keeping a `.backup` copy of
    /// the old bundle and rolling it back if the final rename fails.
    fn swap_bundle_dirs(staging_dir: &Path, live_dir: PathBuf) -> Result<(), TimeDataError> {
        let backup_dir = live_dir.with_extension("backup");
        if backup_dir.exists() {
            fs::remove_dir_all(&backup_dir)?;
        }
        if live_dir.exists() {
            fs::rename(&live_dir, &backup_dir)?;
        }
        match fs::rename(staging_dir, &live_dir) {
            Ok(()) => {
                if backup_dir.exists() {
                    fs::remove_dir_all(&backup_dir)?;
                }
                Ok(())
            }
            Err(err) => {
                // Best-effort rollback: restore the previous live bundle.
                if backup_dir.exists() && !live_dir.exists() {
                    let _ = fs::rename(&backup_dir, &live_dir);
                }
                Err(TimeDataError::Io(err))
            }
        }
    }

    /// `fs::read_to_string` with the path prepended to any error message.
    fn read_text(path: PathBuf) -> Result<String, TimeDataError> {
        fs::read_to_string(&path).map_err(|err| {
            TimeDataError::Io(std::io::Error::new(
                err.kind(),
                format!("{}: {err}", path.display()),
            ))
        })
    }

    /// Downloads `url` as UTF-8 text and records the SHA-256 of the raw
    /// body bytes.
    fn fetch_text(url: &str) -> Result<DownloadedText, TimeDataError> {
        let response = ureq::get(url)
            .set("User-Agent", "tempoch-runtime-data/1.0")
            .timeout(std::time::Duration::from_secs(FETCH_TIMEOUT_SECS))
            .call()
            .map_err(|err| TimeDataError::Download(format!("fetch {url} failed: {err}")))?;
        let mut bytes = Vec::new();
        response
            .into_reader()
            .read_to_end(&mut bytes)
            .map_err(|err| TimeDataError::Download(format!("read {url} body failed: {err}")))?;
        // Hash before the UTF-8 conversion so the whole body buffer does not
        // need to be cloned (the conversion consumes `bytes`).
        let sha256 = sha256_bytes(&bytes);
        let text = String::from_utf8(bytes)
            .map_err(|err| TimeDataError::Download(format!("{url} is not UTF-8: {err}")))?;
        Ok(DownloadedText { text, sha256 })
    }

    /// Serializes a provenance record as pretty JSON with a trailing newline.
    fn render_provenance_json(provenance: &TimeDataProvenance) -> String {
        let value = serde_json::json!({
            "fetched_utc": provenance.fetched_utc(),
            "utc_tai_sha256": provenance.utc_tai_sha256(),
            "delta_t_observed_sha256": provenance.delta_t_observed_sha256(),
            "delta_t_predictions_sha256": provenance.delta_t_predictions_sha256(),
            "eop_finals_sha256": provenance.eop_finals_sha256(),
        });
        let mut rendered = serde_json::to_string_pretty(&value)
            .expect("serializing time-data provenance should work");
        rendered.push('\n');
        rendered
    }

    /// Parses the provenance sidecar; any missing or non-string field is an
    /// `Integrity` error.
    fn parse_provenance_json(text: &str) -> Result<TimeDataProvenance, TimeDataError> {
        let json: Value =
            serde_json::from_str(text).map_err(|err| TimeDataError::Integrity(err.to_string()))?;
        let string_field = |name: &str| -> Result<String, TimeDataError> {
            json.get(name)
                .and_then(Value::as_str)
                .map(str::to_owned)
                .ok_or_else(|| TimeDataError::Integrity(format!("missing provenance field {name}")))
        };
        Ok(TimeDataProvenance::new(
            string_field("fetched_utc")?,
            string_field("utc_tai_sha256")?,
            string_field("delta_t_observed_sha256")?,
            string_field("delta_t_predictions_sha256")?,
            string_field("eop_finals_sha256")?,
        ))
    }

    /// Lowercase hex SHA-256 of `bytes`.
    fn sha256_bytes(bytes: &[u8]) -> String {
        use std::fmt::Write as _;

        let mut hasher = Sha256::new();
        hasher.update(bytes);
        let digest = hasher.finalize();
        let mut out = String::with_capacity(digest.len() * 2);
        for byte in digest {
            // write! appends into the existing buffer; the previous
            // per-byte `format!` allocated a fresh String for every byte.
            let _ = write!(out, "{byte:02x}");
        }
        out
    }

    /// Compares the SHA-256 of `text` against the expected hex digest.
    fn verify_sha256(label: &str, text: &str, expected: &str) -> Result<(), TimeDataError> {
        let actual = sha256_bytes(text.as_bytes());
        if actual != expected {
            return Err(TimeDataError::Integrity(format!(
                "{label} SHA-256 mismatch: expected {expected}, got {actual}"
            )));
        }
        Ok(())
    }
}
482
483#[cfg(feature = "fetch")]
484pub use fetch_support::TimeDataManager;
485
486fn mjd_epoch() -> NaiveDate {
487 NaiveDate::from_ymd_opt(1858, 11, 17).unwrap()
488}
489
490fn mjd_from_date(d: NaiveDate) -> i32 {
491 (d - mjd_epoch()).num_days() as i32
492}
493
/// Collapses every run of whitespace to a single space and trims both ends.
///
/// `split_whitespace` already splits on tabs (and all other whitespace), so
/// the previous preliminary `replace('\t', " ")` allocated an intermediate
/// `String` with no behavioral effect; it has been dropped.
fn normalize_ws(s: &str) -> String {
    s.split_whitespace().collect::<Vec<_>>().join(" ")
}
500
/// Maps a month token (abbreviated or full, any case, with punctuation such
/// as a trailing period) to its 1-based month number.
fn parse_month(token: &str) -> Result<u32, String> {
    // Keep only letters, lowercased, so "Jan.", "JAN" and "january" all
    // reduce to the same key.
    let mut key = String::with_capacity(token.len());
    for c in token.chars() {
        if c.is_ascii_alphabetic() {
            key.push(c.to_ascii_lowercase());
        }
    }
    match key.as_str() {
        "jan" | "january" => Ok(1),
        "feb" | "february" => Ok(2),
        "mar" | "march" => Ok(3),
        "apr" | "april" => Ok(4),
        "may" => Ok(5),
        "jun" | "june" => Ok(6),
        "jul" | "july" => Ok(7),
        "aug" | "august" => Ok(8),
        "sep" | "sept" | "september" => Ok(9),
        "oct" | "october" => Ok(10),
        "nov" | "november" => Ok(11),
        "dec" | "december" => Ok(12),
        _ => Err(format!("unknown month token: {token:?}")),
    }
}
524
/// Parses a date fragment such as `"1961 Jan. 1"` or `"Aug. 1"`.
///
/// When the fragment omits the year, `default_year` (typically carried over
/// from the previous segment's end date) is used; a missing default is an
/// error. Repeated whitespace and trailing periods are tolerated.
fn parse_date_fragment(fragment: &str, default_year: Option<i32>) -> Result<NaiveDate, String> {
    let normalized = normalize_ws(fragment);
    // Strip trailing "." left over from abbreviations at the fragment end.
    let normalized = normalized.trim_end_matches('.').trim();
    let tokens: Vec<&str> = normalized.split_whitespace().collect();
    let (year, month_token, day_token) = match tokens.as_slice() {
        // "YYYY <month> <day>" — explicit four-digit year.
        [year, month, day] if year.len() == 4 && year.chars().all(|c| c.is_ascii_digit()) => (
            year.parse::<i32>().map_err(|err| err.to_string())?,
            *month,
            *day,
        ),
        // "<month> <day>" — inherit the caller-supplied year.
        [month, day] => (
            default_year.ok_or_else(|| format!("missing year for fragment: {fragment:?}"))?,
            *month,
            *day,
        ),
        _ => return Err(format!("unable to parse date fragment: {fragment:?}")),
    };
    let month = parse_month(month_token)?;
    let day = day_token
        .parse::<u32>()
        .map_err(|_| format!("bad day in fragment: {fragment:?}"))?;
    NaiveDate::from_ymd_opt(year, month, day)
        .ok_or_else(|| format!("invalid calendar date in fragment: {fragment:?}"))
}
549
/// Parses a number that may contain embedded spaces (e.g. `"37 300"`).
fn compact_number(s: &str) -> Result<f64, String> {
    let compacted = s.replace(' ', "");
    compacted
        .parse::<f64>()
        .map_err(|err| format!("bad number {s:?}: {err}"))
}

/// Pulls the constant TAI-UTC offset — the first number that directly
/// precedes an `s` unit marker — out of an IERS formula string.
fn extract_base_seconds(formula: &str) -> Result<f64, String> {
    let bytes = formula.as_bytes();
    for (idx, &byte) in bytes.iter().enumerate() {
        if byte != b's' {
            continue;
        }
        // Walk left over digits, dots, and spaces to the number's start.
        let mut begin = idx;
        while begin > 0 && matches!(bytes[begin - 1], b'0'..=b'9' | b'.' | b' ') {
            begin -= 1;
        }
        let candidate = &formula[begin..idx];
        // Require at least one digit so a bare "s" does not match.
        if candidate.bytes().any(|b| b.is_ascii_digit()) {
            return compact_number(candidate);
        }
    }
    Err(format!("unable to parse TAI-UTC base from {formula:?}"))
}
579
/// Extracts the rate term `(MJD - <ref>) x <slope>s` from a UTC formula.
///
/// Returns `Ok(Some((reference_mjd, slope_seconds_per_day)))` when the
/// formula carries a recognizable rate term, `Ok(None)` when it is a pure
/// constant offset, and `Err` only when a matched number fails to parse.
fn extract_slope(formula: &str) -> Result<Option<(f64, f64)>, String> {
    let Some(mjd_idx) = formula.find("MJD") else {
        return Ok(None);
    };
    let rest = &formula[mjd_idx + 3..];
    let rest = rest.trim_start();
    if !rest.starts_with('-') {
        return Ok(None);
    }
    // Reference MJD: digits (possibly space-grouped) right after the dash.
    let after_dash = rest[1..].trim_start();
    let ref_end = after_dash
        .char_indices()
        .find(|(_, c)| !(c.is_ascii_digit() || *c == ' '))
        .map(|(idx, _)| idx)
        .unwrap_or(after_dash.len());
    let ref_str = after_dash[..ref_end].trim();
    if ref_str.is_empty() {
        return Ok(None);
    }
    let reference_mjd = compact_number(ref_str)?;
    // Skip an optional closing parenthesis, then require the 'x' multiplier.
    let after_ref = after_dash[ref_end..].trim_start();
    let after_paren = after_ref
        .strip_prefix(')')
        .unwrap_or(after_ref)
        .trim_start();
    let after_x = match after_paren.strip_prefix('x') {
        Some(rest) => rest.trim_start(),
        None => return Ok(None),
    };
    // Slope: the number running up to the trailing 's' seconds marker.
    let slope_end = after_x
        .char_indices()
        .find(|(_, c)| !(c.is_ascii_digit() || *c == '.' || *c == ' '))
        .map(|(idx, _)| idx)
        .unwrap_or(after_x.len());
    let slope_str = after_x[..slope_end].trim();
    if slope_str.is_empty() {
        return Ok(None);
    }
    let rest_after_slope = after_x[slope_end..].trim_start();
    if !rest_after_slope.starts_with('s') {
        return Ok(None);
    }
    let slope = compact_number(slope_str)?;
    Ok(Some((reference_mjd, slope)))
}
625
/// Parses the IERS `UTC-TAI.history` file into ordered, contiguous segments.
///
/// Data rows look like `<start date> - [<end date>] <formula>`. State is
/// carried between rows: a missing start-date year inherits the previous
/// row's end year, and a `""` (ditto) formula reuses the previous row's
/// reference MJD and slope. The resulting list is validated for ordering
/// and contiguity before being returned.
pub fn parse_utc_tai_segments(text: &str) -> Result<Vec<UtcTaiSegment>, String> {
    let mut segments = Vec::new();
    let mut previous_end: Option<NaiveDate> = None;
    let mut previous_reference_mjd: Option<f64> = None;
    let mut previous_slope: Option<f64> = None;

    for raw_line in text.lines() {
        let line = raw_line.trim_end();
        // Data rows always contain a date-range dash; skip headers that
        // happen to contain dashes of their own.
        if !line.contains('-')
            || line.contains("UTC-TAI.history")
            || line.contains("Limits of validity")
        {
            continue;
        }
        if !line.chars().any(|c| c.is_ascii_digit()) {
            continue;
        }

        // Split on the first dash: left is the start date, right holds the
        // optional end date plus the offset formula.
        let dash_idx = line.find('-').unwrap();
        let (left, right) = line.split_at(dash_idx);
        let right = &right[1..];
        // A start fragment must contain a month name; otherwise the dash
        // belonged to something else (e.g. a negative number).
        if !left.chars().any(|c| c.is_ascii_alphabetic()) {
            continue;
        }

        let default_start_year = previous_end.map(date_year);
        let start_date = parse_date_fragment(left, default_start_year)?;
        let right_normalized = normalize_ws(right);
        let (end_date, formula) = match parse_end_and_formula(&right_normalized, start_date) {
            Some((end_date, formula)) => (Some(end_date), formula),
            // No end date: open-ended segment, the whole right side is formula.
            None => (None, right_normalized.clone()),
        };

        let base_seconds = extract_base_seconds(&formula)?;
        let (reference_mjd, slope_seconds_per_day) =
            if let Some((reference_mjd, slope)) = extract_slope(&formula)? {
                (reference_mjd, slope)
            } else if formula.contains("\"\"") {
                // Ditto marks: reuse the previous row's rate parameters.
                match (previous_reference_mjd, previous_slope) {
                    (Some(reference_mjd), Some(slope)) => (reference_mjd, slope),
                    _ => {
                        return Err(format!(
                            "repeated UTC formula without previous state: {formula:?}"
                        ))
                    }
                }
            } else {
                // Pure constant offset: zero slope, reference at the start.
                (mjd_from_date(start_date) as f64, 0.0)
            };

        segments.push(UtcTaiSegment {
            start_mjd: mjd_from_date(start_date),
            end_mjd: end_date.map(mjd_from_date),
            base_seconds,
            reference_mjd,
            slope_seconds_per_day,
        });

        previous_end = end_date;
        previous_reference_mjd = Some(reference_mjd);
        previous_slope = Some(slope_seconds_per_day);
    }

    validate_utc_tai_segments(&segments)?;
    Ok(segments)
}
692
693fn date_year(date: NaiveDate) -> i32 {
694 use chrono::Datelike;
695 date.year()
696}
697
/// Splits the right-hand side of a history row into its optional end date
/// and the remaining formula text.
///
/// Recognizes `YYYY <month> <day> <formula…>` (explicit year) and
/// `<month> <day> <formula…>` (year inherited from `start_date`); returns
/// `None` when no leading end date is present, in which case the caller
/// treats the whole string as the formula.
fn parse_end_and_formula(
    right_normalized: &str,
    start_date: NaiveDate,
) -> Option<(NaiveDate, String)> {
    // splitn(4) keeps the trailing formula intact in the fourth token.
    let tokens: Vec<&str> = right_normalized.splitn(4, ' ').collect();
    if tokens.len() < 3 {
        return None;
    }
    // Case 1: "YYYY <month> <day> <formula>".
    if tokens.len() == 4
        && tokens[0].len() == 4
        && tokens[0].chars().all(|c| c.is_ascii_digit())
        && parse_month(tokens[1]).is_ok()
        && !tokens[2].is_empty()
        && tokens[2].chars().all(|c| c.is_ascii_digit())
    {
        let year = tokens[0].parse::<i32>().ok()?;
        let month = parse_month(tokens[1]).ok()?;
        let day = tokens[2].parse::<u32>().ok()?;
        let end_date = NaiveDate::from_ymd_opt(year, month, day)?;
        return Some((end_date, tokens[3].to_string()));
    }
    // Case 2: "<month> <day> <formula>" — year taken from the start date.
    if parse_month(tokens[0]).is_ok()
        && !tokens[1].is_empty()
        && tokens[1].chars().all(|c| c.is_ascii_digit())
    {
        let month = parse_month(tokens[0]).ok?;
        let day = tokens[1].parse::<u32>().ok()?;
        let end_date = NaiveDate::from_ymd_opt(date_year(start_date), month, day)?;
        // Re-split with splitn(3) so the formula keeps its own spaces.
        let rest = right_normalized
            .splitn(3, ' ')
            .nth(2)
            .unwrap_or("")
            .to_string();
        return Some((end_date, rest));
    }
    None
}
735
/// Validates parsed UTC-TAI segments: non-empty, each closed segment extends
/// past its own start, starts strictly increase, and each segment's end MJD
/// equals the next segment's start (no gaps, no open-ended interior rules).
fn validate_utc_tai_segments(segments: &[UtcTaiSegment]) -> Result<(), String> {
    if segments.is_empty() {
        return Err("UTC-TAI history parsing produced no segments".into());
    }
    for (idx, segment) in segments.iter().enumerate() {
        if let Some(end_mjd) = segment.end_mjd {
            if end_mjd <= segment.start_mjd {
                return Err(format!(
                    "UTC-TAI segment ending at MJD {end_mjd} does not extend past start {}",
                    segment.start_mjd
                ));
            }
        }
        // Pairwise checks only apply when a following segment exists.
        let Some(next) = segments.get(idx + 1) else {
            continue;
        };
        if next.start_mjd == segment.start_mjd {
            return Err(format!(
                "UTC-TAI segment list contains duplicate start MJD {}",
                segment.start_mjd
            ));
        }
        if next.start_mjd < segment.start_mjd {
            return Err(format!(
                "UTC-TAI segment list is not strictly increasing near {} -> {}",
                segment.start_mjd, next.start_mjd
            ));
        }
        // Contiguity: a closed segment must hand over exactly where the next
        // one starts; an open-ended segment may only be the last one.
        match segment.end_mjd {
            Some(end_mjd) if end_mjd == next.start_mjd => {}
            Some(end_mjd) => {
                return Err(format!(
                    "UTC-TAI segment boundary mismatch near {} -> {}",
                    end_mjd, next.start_mjd
                ))
            }
            None => {
                return Err(format!(
                    "UTC-TAI segment starting at MJD {} is open-ended before the next segment {}",
                    segment.start_mjd, next.start_mjd
                ))
            }
        }
    }
    Ok(())
}
782
/// Checks that the MJD column (first element of each pair) strictly
/// increases; `label` names the offending series in error messages.
fn validate_strictly_increasing_mjds(label: &str, points: &[(f64, f64)]) -> Result<(), String> {
    for pair in points.windows(2) {
        let (previous, following) = (pair[0].0, pair[1].0);
        if following == previous {
            return Err(format!(
                "{label} MJD column contains duplicate entry at {previous:.3}"
            ));
        }
        if following < previous {
            return Err(format!(
                "{label} MJD column is not strictly increasing near {previous:.3} -> {following:.3}"
            ));
        }
    }
    Ok(())
}
800
801fn validate_eop_points(points: &[EopPoint]) -> Result<(), String> {
802 if points.len() < 2 {
803 return Err("EOP finals parsing produced fewer than two usable rows".into());
804 }
805 for window in points.windows(2) {
806 let current = window[0].mjd;
807 let next = window[1].mjd;
808 if next == current {
809 return Err(format!(
810 "EOP finals MJD column contains duplicate entry at {current}"
811 ));
812 }
813 if next < current {
814 return Err(format!(
815 "EOP finals MJD column is not strictly increasing near {current} -> {next}"
816 ));
817 }
818 if next != current + 1 {
819 return Err(format!(
820 "EOP finals MJD column has a daily gap near {current} -> {next}"
821 ));
822 }
823 }
824 Ok(())
825}
826
827pub fn parse_delta_t_observed(text: &str) -> Result<Vec<(f64, f64)>, String> {
828 let mut points = Vec::new();
829 for raw_line in text.lines() {
830 let parts: Vec<&str> = raw_line.split_whitespace().collect();
831 if parts.len() != 4 {
832 continue;
833 }
834 if !parts[0].chars().all(|c| c.is_ascii_digit()) {
835 continue;
836 }
837 let year = parts[0]
838 .parse::<i32>()
839 .map_err(|err: std::num::ParseIntError| err.to_string())?;
840 let month = parts[1]
841 .parse::<u32>()
842 .map_err(|err: std::num::ParseIntError| err.to_string())?;
843 let day = parts[2]
844 .parse::<u32>()
845 .map_err(|err: std::num::ParseIntError| err.to_string())?;
846 let delta_t = parts[3]
847 .parse::<f64>()
848 .map_err(|err: std::num::ParseFloatError| err.to_string())?;
849 let date = NaiveDate::from_ymd_opt(year, month, day)
850 .ok_or_else(|| format!("invalid date in observed Delta T: {raw_line:?}"))?;
851 points.push((mjd_from_date(date) as f64, delta_t));
852 }
853 if points.is_empty() {
854 return Err("observed Delta T parsing produced no points".into());
855 }
856 validate_strictly_increasing_mjds("observed Delta T", &points)?;
857 Ok(points)
858}
859
860pub fn parse_delta_t_predictions(text: &str) -> Result<Vec<(f64, f64)>, String> {
861 let mut points = Vec::new();
862 for raw_line in text.lines() {
863 let parts: Vec<&str> = raw_line.split_whitespace().collect();
864 if parts.is_empty() || parts[0] == "MJD" || parts.len() < 3 {
865 continue;
866 }
867 let Ok(mjd) = parts[0].parse::<f64>() else {
868 continue;
869 };
870 let Ok(delta_t) = parts[2].parse::<f64>() else {
871 continue;
872 };
873 points.push((mjd, delta_t));
874 }
875 if points.is_empty() {
876 return Err("predicted Delta T parsing produced no points".into());
877 }
878 validate_strictly_increasing_mjds("predicted Delta T", &points)?;
879 Ok(points)
880}
881
/// Stitches the observed and predicted Delta T series into one sequence.
///
/// Predicted points strictly after the last observed MJD are kept and
/// shifted by a constant offset so the prediction curve, linearly
/// extrapolated back to the last observed MJD, matches the last observed
/// value exactly. Returns the combined series and the MJD where
/// observations end.
pub fn build_modern_delta_t_points(
    observed_points: &[(f64, f64)],
    predicted_points: &[(f64, f64)],
) -> Result<(Vec<(f64, f64)>, f64), String> {
    let (last_obs_mjd, last_obs_dt) = *observed_points.last().ok_or("observed Delta T is empty")?;
    validate_strictly_increasing_mjds("observed Delta T", observed_points)?;
    validate_strictly_increasing_mjds("predicted Delta T", predicted_points)?;
    let mut future: Vec<(f64, f64)> = predicted_points
        .iter()
        .copied()
        .filter(|(mjd, _)| *mjd > last_obs_mjd)
        .collect();

    if !future.is_empty() {
        // Evaluate the first prediction interval at the stitch point; with a
        // single remaining point (or a degenerate interval) use it directly.
        let (m0, d0) = future[0];
        let (m1, d1) = if future.len() >= 2 {
            future[1]
        } else {
            (m0, d0)
        };
        let frac = if m1 != m0 {
            (last_obs_mjd - m0) / (m1 - m0)
        } else {
            0.0
        };
        let pred_at_stitch = d0 + frac * (d1 - d0);
        // Shift every future point so the series is continuous at the stitch.
        let continuity_offset = last_obs_dt - pred_at_stitch;
        for point in &mut future {
            point.1 += continuity_offset;
        }
    }

    let mut combined = Vec::with_capacity(observed_points.len() + future.len());
    combined.extend_from_slice(observed_points);
    combined.extend_from_slice(&future);
    if combined.len() < 2 {
        return Err("modern Delta T series must contain at least two points".into());
    }
    validate_strictly_increasing_mjds("modern Delta T", &combined)?;
    Ok((combined, last_obs_mjd))
}
923
/// Parses the fixed-width IERS `finals2000A.all` EOP file.
///
/// A row is kept only when it has a parsable MJD, an `I` (observed) or `P`
/// (predicted) UT1-UTC flag, and a parsable UT1-UTC value; everything else
/// is skipped. Column positions are 1-based and inclusive. The result is
/// validated as a gap-free daily MJD series.
pub fn parse_eop_finals(text: &str) -> Result<Vec<EopPoint>, String> {
    let mut points = Vec::new();

    for line in text.lines() {
        // Shorter than the UT1-UTC value field (ends at column 68): unusable.
        if line.len() < 68 {
            continue;
        }
        let Some(mjd_f) = col(line, 8, 15).and_then(parse_f64) else {
            continue;
        };
        let mjd = mjd_f.round() as i32;
        let Some(ut1_flag) = col(line, 58, 58).and_then(parse_flag) else {
            continue;
        };
        if !matches!(ut1_flag, 'I' | 'P') {
            continue;
        }
        let Some(ut1_minus_utc_seconds) = col(line, 59, 68).and_then(parse_f64) else {
            continue;
        };

        // Optional fields: absent/blank columns simply yield None.
        let pm_flag = col(line, 17, 17).and_then(parse_flag);
        let nutation_flag = col(line, 96, 96).and_then(parse_flag);
        points.push(EopPoint {
            mjd,
            pm_observed: matches!(pm_flag, Some('I')),
            ut1_observed: ut1_flag == 'I',
            nutation_observed: matches!(nutation_flag, Some('I')),
            pm_xp_arcsec: col(line, 19, 27).and_then(parse_f64),
            pm_yp_arcsec: col(line, 38, 46).and_then(parse_f64),
            ut1_minus_utc_seconds,
            lod_milliseconds: col(line, 80, 86).and_then(parse_f64),
            dx_milliarcsec: col(line, 98, 106).and_then(parse_f64),
            dy_milliarcsec: col(line, 117, 125).and_then(parse_f64),
        });
    }

    validate_eop_points(&points)?;
    Ok(points)
}
964
965pub fn observed_end_mjd(points: &[EopPoint]) -> i32 {
966 points
967 .iter()
968 .rev()
969 .find(|point| point.ut1_observed)
970 .map(|point| point.mjd)
971 .unwrap_or(points[0].mjd)
972}
973
/// Returns the 1-based, inclusive column range `[start, end]` of `line`.
///
/// `None` when `start` is 0, the line is too short, the range is inverted,
/// or the bounds are not on `char` boundaries. Using `str::get` makes every
/// failure mode a `None`; the previous manual `&line[start..end]` slice
/// could panic on an inverted range or a multi-byte character boundary.
fn col(line: &str, start_1based: usize, end_1based_inclusive: usize) -> Option<&str> {
    let start = start_1based.checked_sub(1)?;
    line.get(start..end_1based_inclusive)
}
982
/// Parses a trimmed slice as `f64`; blank or malformed input yields `None`.
fn parse_f64(slice: &str) -> Option<f64> {
    match slice.trim() {
        "" => None,
        trimmed => trimmed.parse::<f64>().ok(),
    }
}
990
/// Returns the first non-whitespace character of `slice`, if any.
fn parse_flag(slice: &str) -> Option<char> {
    slice.chars().find(|c| !c.is_whitespace())
}
994
#[cfg(test)]
mod tests {
    use super::*;

    /// Minimal three-row UTC-TAI history exercising an explicit rate
    /// formula, a repeated ("") formula, and a final open-ended constant.
    fn sample_utc_tai_history() -> &'static str {
        "1961 Jan. 1 - Aug. 1 1.4228180s + (MJD - 37300) x 0.001296s\n\
         Aug. 1 - 1962 Jan. 1 1.3728180s + \"\"\n\
         1962 Jan. 1 - 10s\n"
    }

    /// Right-aligns `value` into the 1-based, inclusive column range of a
    /// fixed-width byte line; panics if the value does not fit.
    fn set_field(line: &mut [u8], start_1based: usize, end_1based_inclusive: usize, value: &str) {
        let start = start_1based - 1;
        let width = end_1based_inclusive - start_1based + 1;
        let bytes = value.as_bytes();
        assert!(
            bytes.len() <= width,
            "{value:?} does not fit in width {width}"
        );
        let offset = width - bytes.len();
        line[start + offset..start + offset + bytes.len()].copy_from_slice(bytes);
    }

    /// Builds a synthetic 125-column finals2000A row with the given flag and
    /// values; `None` fields are left blank so the parser yields `None`.
    #[allow(clippy::too_many_arguments)]
    fn sample_eop_line(
        mjd: i32,
        ut1_flag: char,
        ut1_minus_utc_seconds: f64,
        pm_xp_arcsec: Option<f64>,
        pm_yp_arcsec: Option<f64>,
        lod_milliseconds: Option<f64>,
        dx_milliarcsec: Option<f64>,
        dy_milliarcsec: Option<f64>,
    ) -> String {
        let mut line = vec![b' '; 125];
        set_field(&mut line, 8, 15, &format!("{:8.2}", mjd as f64));
        // Polar-motion flag (column 17) is always observed in samples.
        line[16] = b'I';
        if let Some(value) = pm_xp_arcsec {
            set_field(&mut line, 19, 27, &format!("{value:>9.6}"));
        }
        if let Some(value) = pm_yp_arcsec {
            set_field(&mut line, 38, 46, &format!("{value:>9.6}"));
        }
        // UT1-UTC flag (column 58) and value (columns 59-68).
        line[57] = ut1_flag as u8;
        set_field(&mut line, 59, 68, &format!("{ut1_minus_utc_seconds:>10.7}"));
        if let Some(value) = lod_milliseconds {
            set_field(&mut line, 80, 86, &format!("{value:>7.4}"));
        }
        // Nutation flag (column 96).
        line[95] = b'I';
        if let Some(value) = dx_milliarcsec {
            set_field(&mut line, 98, 106, &format!("{value:>9.3}"));
        }
        if let Some(value) = dy_milliarcsec {
            set_field(&mut line, 117, 125, &format!("{value:>9.3}"));
        }
        String::from_utf8(line).expect("sample EOP line must stay ASCII")
    }

    #[test]
    fn parse_utc_tai_segments_reads_piecewise_rules() {
        let segments = parse_utc_tai_segments(sample_utc_tai_history()).unwrap();
        assert_eq!(segments.len(), 3);
        assert_eq!(segments[0].start_mjd, 37_300);
        assert_eq!(segments[0].end_mjd, Some(37_512));
        assert_eq!(segments[0].reference_mjd, 37_300.0);
        assert_eq!(segments[0].slope_seconds_per_day, 0.001_296);
        // The "" row must inherit the previous rate parameters.
        assert_eq!(segments[1].reference_mjd, segments[0].reference_mjd);
        assert_eq!(
            segments[1].slope_seconds_per_day,
            segments[0].slope_seconds_per_day
        );
        assert_eq!(segments[2].end_mjd, None);
        assert_eq!(segments[2].base_seconds, 10.0);
    }

    #[test]
    fn parse_delta_t_observed_reads_representative_rows() {
        let points = parse_delta_t_observed(
            "2024 01 01 69.1000\n\
             2024 02 01 69.2000\n",
        )
        .unwrap();
        assert_eq!(points.len(), 2);
        assert_eq!(
            points[0].0,
            mjd_from_date(NaiveDate::from_ymd_opt(2024, 1, 1).unwrap()) as f64
        );
        assert_eq!(points[1].1, 69.2);
    }

    #[test]
    fn parse_delta_t_predictions_reads_representative_rows() {
        let points = parse_delta_t_predictions(
            "MJD YEAR DELTAT\n\
             60310 2024.1 69.4000\n\
             60341 2024.2 69.5000\n",
        )
        .unwrap();
        assert_eq!(points, vec![(60_310.0, 69.4), (60_341.0, 69.5)]);
    }

    #[test]
    fn build_modern_delta_t_points_applies_continuity_offset() {
        let observed = [(60_000.0, 69.8), (60_030.0, 71.0)];
        let predicted = [(60_040.0, 70.0), (60_050.0, 72.0)];
        let (combined, observed_end_mjd) =
            build_modern_delta_t_points(&observed, &predicted).unwrap();

        assert_eq!(observed_end_mjd, 60_030.0);
        assert_eq!(combined.len(), 4);
        // The shifted prediction line, extrapolated back to the stitch MJD,
        // must reproduce the last observed value exactly.
        let (m0, d0) = combined[2];
        let (m1, d1) = combined[3];
        let frac = (observed_end_mjd - m0) / (m1 - m0);
        let stitched_value = d0 + frac * (d1 - d0);
        assert!((stitched_value - 71.0).abs() < 1e-12);
    }

    #[test]
    fn build_modern_delta_t_points_rejects_duplicate_input_mjds() {
        let observed = [(60_000.0, 69.8), (60_000.0, 69.9)];
        let predicted = [(60_031.0, 70.0), (60_062.0, 70.2)];
        let err = build_modern_delta_t_points(&observed, &predicted).unwrap_err();
        assert!(err.contains("observed Delta T"));
        assert!(err.contains("duplicate"));
    }

    #[test]
    fn parse_delta_t_predictions_rejects_non_increasing_mjds() {
        let err = parse_delta_t_predictions(
            "MJD YEAR DELTAT\n\
             60341 2024.2 69.5000\n\
             60310 2024.1 69.4000\n",
        )
        .unwrap_err();
        assert!(err.contains("predicted Delta T"));
        assert!(err.contains("not strictly increasing"));
    }

    #[test]
    fn parse_eop_finals_reads_representative_rows() {
        let text = format!(
            "{}\n{}\n",
            sample_eop_line(
                60_000,
                'I',
                -0.123_456_7,
                Some(0.123_456),
                Some(-0.234_567),
                Some(1.2345),
                Some(0.321),
                Some(-0.111),
            ),
            sample_eop_line(60_001, 'P', -0.223_456_7, None, None, None, None, None,),
        );
        let points = parse_eop_finals(&text).unwrap();
        assert_eq!(points.len(), 2);
        assert_eq!(points[0].mjd, 60_000);
        assert!(points[0].ut1_observed);
        assert_eq!(points[0].pm_xp_arcsec, Some(0.123_456));
        assert_eq!(points[0].lod_milliseconds, Some(1.2345));
        assert_eq!(points[0].dx_milliarcsec, Some(0.321));
        assert_eq!(points[1].mjd, 60_001);
        assert!(!points[1].ut1_observed);
        assert_eq!(points[1].pm_xp_arcsec, None);
        assert_eq!(points[1].dx_milliarcsec, None);
    }

    #[test]
    fn parse_eop_finals_rejects_duplicate_mjds() {
        let text = format!(
            "{}\n{}\n",
            sample_eop_line(60_000, 'I', -0.1, Some(0.1), Some(0.2), None, None, None),
            sample_eop_line(60_000, 'P', -0.2, Some(0.1), Some(0.2), None, None, None),
        );
        let err = parse_eop_finals(&text).unwrap_err();
        assert!(err.contains("duplicate"));
    }

    #[test]
    fn parse_eop_finals_rejects_daily_gaps() {
        let text = format!(
            "{}\n{}\n",
            sample_eop_line(60_000, 'I', -0.1, Some(0.1), Some(0.2), None, None, None),
            sample_eop_line(60_002, 'P', -0.2, Some(0.1), Some(0.2), None, None, None),
        );
        let err = parse_eop_finals(&text).unwrap_err();
        assert!(err.contains("daily gap"));
    }
}