1use chrono::Utc;
4use serde::{Deserialize, Serialize};
5use sha2::Digest as _;
6use std::io::Write;
7use std::path::{Path, PathBuf};
8
/// A signed, timestamped summary of one day's worth of immutable log entries.
///
/// `root_hash` is a Merkle root computed over `hourly_roots`, and
/// `previous_day_root` chains consecutive days together (all zeros for the
/// first day in the chain).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DailyPublication {
    /// Publication date, `YYYY-MM-DD` (UTC).
    pub date: String,
    /// Hex Merkle root computed over `hourly_roots`.
    pub root_hash: String,
    /// Number of log entries covered by this publication.
    pub entry_count: u64,
    /// Hex hashes of each hourly Merkle root for the day.
    pub hourly_roots: Vec<String>,
    /// `root_hash` of the previous day's signed publication; all zeros when none exists.
    pub previous_day_root: String,
    /// RFC 3339 creation timestamp (UTC).
    pub created_at: String,
    /// Detached signature, once the publication has been signed.
    pub signature: Option<PublicationSignature>,
    /// RFC 3161 timestamp, once one has been obtained from a TSA.
    pub tsa_timestamp: Option<TsaTimestamp>,
}
29
30impl DailyPublication {
31 pub fn to_canonical_json_bytes(&self) -> Result<Vec<u8>, crate::error::LogError> {
33 serde_json::to_vec(self)
34 .map_err(|e| crate::error::LogError::SerializationError(e.to_string()))
35 }
36
37 pub fn to_canonical_json(&self) -> Result<String, crate::error::LogError> {
39 let bytes = self.to_canonical_json_bytes()?;
40 String::from_utf8(bytes)
41 .map_err(|e| crate::error::LogError::SerializationError(e.to_string()))
42 }
43
44 pub fn to_canonical_json_gzip(&self) -> Result<Vec<u8>, crate::error::LogError> {
46 let json = self.to_canonical_json_bytes()?;
47 let mut encoder = flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::default());
48 encoder
49 .write_all(&json)
50 .map_err(|e| crate::error::LogError::SerializationError(e.to_string()))?;
51 encoder
52 .finish()
53 .map_err(|e| crate::error::LogError::SerializationError(e.to_string()))
54 }
55
56 pub fn publication_basename(&self) -> String {
58 let root_prefix = self.root_hash.get(..16).unwrap_or(&self.root_hash);
59 format!("daily-publication-{}-{}", self.date, root_prefix)
60 }
61
62 pub fn recompute_root_hash(&self) -> String {
64 PublicationService::compute_merkle_root(&self.hourly_roots)
65 }
66
67 pub fn verify_root_hash(&self) -> bool {
69 self.root_hash == self.recompute_root_hash()
70 }
71
72 pub fn write_canonical_json_file<P: AsRef<Path>>(
74 &self,
75 path: P,
76 ) -> Result<(), crate::error::LogError> {
77 let bytes = self.to_canonical_json_bytes()?;
78 std::fs::write(path, bytes)
79 .map_err(|e| crate::error::LogError::PublicationError(e.to_string()))
80 }
81
82 pub fn write_canonical_json_gzip_file<P: AsRef<Path>>(
84 &self,
85 path: P,
86 ) -> Result<(), crate::error::LogError> {
87 let bytes = self.to_canonical_json_gzip()?;
88 std::fs::write(path, bytes)
89 .map_err(|e| crate::error::LogError::PublicationError(e.to_string()))
90 }
91}
92
/// Detached signature metadata attached to a [`DailyPublication`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PublicationSignature {
    /// Signature algorithm label (e.g. "RSA-PSS-SHA256").
    pub algorithm: String,
    /// Identifier of the signing key.
    pub key_id: String,
    /// Base64-encoded (standard alphabet) signature bytes.
    pub value: String,
}
100
/// RFC 3161 timestamp attached to a publication.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TsaTimestamp {
    /// URL of the TSA that issued the token.
    pub tsa_url: String,
    /// RFC 3339 timestamp string (best-effort extraction from the token,
    /// HTTP Date header, or local clock — in that order).
    pub timestamp: String,
    /// Base64-encoded DER timestamp token (or a `mock-sha256=...` marker
    /// when the mock provider was used).
    pub token: String,
}
108
/// Result of structural (non-cryptographic) checks on a stored TSA token.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TsaTokenInspection {
    /// Token string was non-empty.
    pub token_present: bool,
    /// Token decoded as standard base64.
    pub token_base64_valid: bool,
    /// Decoded DER was non-empty.
    pub token_der_nonempty: bool,
    /// GeneralizedTime found in the DER, rendered as RFC 3339, if any.
    pub extracted_timestamp: Option<String>,
}
117
/// Result of CMS/PKCS#7 signature verification of a TSA token.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TsaCmsVerification {
    /// Signature verified successfully against the provided trust roots.
    pub verified: bool,
    /// GeneralizedTime extracted from the token, rendered as RFC 3339, if any.
    pub extracted_timestamp: Option<String>,
}
124
/// Errors from [`TsaTimestamp::verify_cms_signature_with_pem_roots`], one
/// variant per verification stage.
#[derive(Debug, thiserror::Error)]
pub enum TsaCmsVerifyError {
    /// Crate was compiled without the `tsa-cms-openssl` feature.
    #[error("TSA CMS verification backend unavailable: {0}")]
    BackendUnavailable(String),
    #[error("TSA token missing")]
    TokenMissing,
    #[error("TSA token base64 decode failed: {0}")]
    TokenBase64(String),
    #[error("TSA token PKCS#7 parse failed: {0}")]
    Pkcs7Parse(String),
    #[error("TSA trust store error: {0}")]
    TrustStore(String),
    #[error("TSA CMS verification failed: {0}")]
    Verify(String),
}
141
/// Builds, signs, publishes, and timestamps daily publications, keeping the
/// day-to-day hash chain in memory.
pub struct PublicationService {
    /// Root hash of the most recently *signed* publication; `None` until the
    /// first signature (the chain then starts from the all-zero root).
    previous_day_root: Option<String>,
}
147
148impl Default for PublicationService {
149 fn default() -> Self {
150 Self::new()
151 }
152}
153
154impl PublicationService {
155 pub fn new() -> Self {
157 PublicationService {
158 previous_day_root: None,
159 }
160 }
161
162 pub fn create_daily_publication(
164 &self,
165 hourly_roots: &[String],
166 entry_count: u64,
167 ) -> DailyPublication {
168 let date = Utc::now().format("%Y-%m-%d").to_string();
169 let previous = self.previous_day_root.clone().unwrap_or_else(|| {
170 "0000000000000000000000000000000000000000000000000000000000000000".to_string()
171 });
172
173 let root_hash = Self::compute_merkle_root(hourly_roots);
175
176 DailyPublication {
177 date,
178 root_hash,
179 entry_count,
180 hourly_roots: hourly_roots.to_vec(),
181 previous_day_root: previous,
182 created_at: Utc::now().to_rfc3339(),
183 signature: None,
184 tsa_timestamp: None,
185 }
186 }
187
188 fn compute_merkle_root(hashes: &[String]) -> String {
190 if hashes.is_empty() {
191 return "0000000000000000000000000000000000000000000000000000000000000000".to_string();
192 }
193
194 use sha2::{Digest, Sha256};
195
196 let mut current: Vec<String> = hashes.to_vec();
197
198 while current.len() > 1 {
199 let mut next = Vec::new();
200
201 for chunk in current.chunks(2) {
202 if chunk.len() == 2 {
203 let mut hasher = Sha256::new();
204 hasher.update(chunk[0].as_bytes());
205 hasher.update(chunk[1].as_bytes());
206 next.push(format!("{:x}", hasher.finalize()));
207 } else {
208 next.push(chunk[0].clone());
209 }
210 }
211
212 current = next;
213 }
214
215 current[0].clone()
216 }
217
218 pub fn sign_publication(&mut self, publication: &mut DailyPublication, signature: &[u8]) {
220 self.sign_publication_with_metadata(
221 publication,
222 signature,
223 "RSA-PSS-SHA256",
224 "rnbc-audit-sig-2026",
225 );
226 }
227
228 pub fn sign_publication_with_metadata(
230 &mut self,
231 publication: &mut DailyPublication,
232 signature: &[u8],
233 algorithm: &str,
234 key_id: &str,
235 ) {
236 publication.signature = Some(PublicationSignature {
237 algorithm: algorithm.to_string(),
238 key_id: key_id.to_string(),
239 value: base64_encode(signature),
240 });
241
242 self.previous_day_root = Some(publication.root_hash.clone());
244 }
245
246 pub fn publish_to_filesystem<P: AsRef<Path>>(
248 &self,
249 publication: &DailyPublication,
250 directory: P,
251 write_gzip: bool,
252 ) -> Result<FilesystemPublication, crate::error::LogError> {
253 let dir = directory.as_ref();
254 std::fs::create_dir_all(dir)
255 .map_err(|e| crate::error::LogError::PublicationError(e.to_string()))?;
256
257 let basename = publication.publication_basename();
258 let json_path = dir.join(format!("{basename}.json"));
259 publication.write_canonical_json_file(&json_path)?;
260
261 let gzip_path = if write_gzip {
262 let path = dir.join(format!("{basename}.json.gz"));
263 publication.write_canonical_json_gzip_file(&path)?;
264 Some(path)
265 } else {
266 None
267 };
268
269 Ok(FilesystemPublication {
270 json_path,
271 gzip_path,
272 })
273 }
274
275 pub async fn add_tsa_timestamp(
282 &mut self,
283 publication: &mut DailyPublication,
284 tsa_url: &str,
285 ) -> Result<(), TsaError> {
286 let hash_to_timestamp = &publication.root_hash;
288
289 let timestamp_request = TsaRequest {
292 hash: hash_to_timestamp.clone(),
293 algorithm: "SHA256".to_string(),
294 nonce: uuid::Uuid::new_v4().to_string(),
295 };
296
297 let response = self.request_timestamp(tsa_url, ×tamp_request).await?;
299
300 publication.tsa_timestamp = Some(TsaTimestamp {
301 tsa_url: tsa_url.to_string(),
302 timestamp: response.timestamp,
303 token: response.token,
304 });
305
306 tracing::info!(
307 "TSA timestamp added for publication {} at {}",
308 publication.date,
309 publication
310 .tsa_timestamp
311 .as_ref()
312 .map(|t| t.timestamp.as_str())
313 .map_or("unknown", |v| v)
314 );
315
316 Ok(())
317 }
318
319 async fn request_timestamp(
325 &self,
326 tsa_url: &str,
327 request: &TsaRequest,
328 ) -> Result<TsaResponse, TsaError> {
329 if tsa_url.starts_with("mock://") {
330 tracing::warn!("Using mock TSA timestamp provider: {}", tsa_url);
331 return Ok(TsaResponse {
332 timestamp: chrono::Utc::now().to_rfc3339(),
333 token: format!("mock-sha256={}", request.hash),
334 tsa_certificate: "placeholder".to_string(),
335 });
336 }
337
338 if !(tsa_url.starts_with("https://") || tsa_url.starts_with("http://")) {
339 return Err(TsaError::UnsupportedScheme(tsa_url.to_string()));
340 }
341
342 let digest_bytes = hex_decode(&request.hash).map_err(TsaError::Encoding)?;
343 let body = build_rfc3161_timestamp_query(&digest_bytes, &request.nonce)?;
344
345 tracing::info!("Requesting TSA token from {}", tsa_url);
346 let client = reqwest::Client::new();
347 let resp = client
348 .post(tsa_url)
349 .header("Content-Type", "application/timestamp-query")
350 .header("Accept", "application/timestamp-reply")
351 .body(body)
352 .send()
353 .await?;
354
355 let status_code = resp.status();
356 if !status_code.is_success() {
357 return Err(TsaError::Server(format!(
358 "HTTP {} from TSA endpoint",
359 status_code
360 )));
361 }
362
363 let date_header = resp
364 .headers()
365 .get(reqwest::header::DATE)
366 .and_then(|v| v.to_str().ok())
367 .map(str::to_string);
368 let bytes = resp.bytes().await?;
369
370 let tsa_reply = parse_timestamp_response(&bytes)?;
371 if tsa_reply.status != 0 && tsa_reply.status != 1 {
372 return Err(TsaError::Server(format!(
373 "TSA rejected request with status {}",
374 tsa_reply.status
375 )));
376 }
377
378 let token_der = tsa_reply
379 .time_stamp_token_der
380 .ok_or(TsaError::InvalidResponse)?;
381
382 let timestamp = extract_generalized_time_rfc3339(&token_der)
385 .or_else(|| date_header.and_then(parse_http_date_to_rfc3339))
386 .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
387
388 Ok(TsaResponse {
389 timestamp,
390 token: base64_encode(&token_der),
391 tsa_certificate: "unparsed".to_string(),
392 })
393 }
394}
395
impl TsaTimestamp {
    /// Structural, non-cryptographic checks on the stored token: presence,
    /// base64 validity, non-empty DER, and a best-effort GeneralizedTime
    /// extraction. Does NOT verify the CMS signature — see
    /// `verify_cms_signature_with_pem_roots` for that.
    pub fn inspect_token(&self) -> TsaTokenInspection {
        use base64::{engine::general_purpose::STANDARD, Engine as _};

        if self.token.is_empty() {
            return TsaTokenInspection {
                token_present: false,
                token_base64_valid: false,
                token_der_nonempty: false,
                extracted_timestamp: None,
            };
        }

        let der = match STANDARD.decode(self.token.as_bytes()) {
            Ok(v) => v,
            Err(_) => {
                return TsaTokenInspection {
                    token_present: true,
                    token_base64_valid: false,
                    token_der_nonempty: false,
                    extracted_timestamp: None,
                };
            }
        };

        let extracted_timestamp = extract_generalized_time_rfc3339(&der);
        TsaTokenInspection {
            token_present: true,
            token_base64_valid: true,
            token_der_nonempty: !der.is_empty(),
            extracted_timestamp,
        }
    }

    /// Verify the token's PKCS#7/CMS signature against PEM trust roots via
    /// OpenSSL (only available with the `tsa-cms-openssl` feature).
    ///
    /// # Errors
    /// Returns a [`TsaCmsVerifyError`] identifying the failing stage:
    /// missing token, base64 decode, PKCS#7 parse, trust-store construction,
    /// or the OpenSSL verification itself.
    #[cfg(feature = "tsa-cms-openssl")]
    pub fn verify_cms_signature_with_pem_roots(
        &self,
        trust_store_pem: &[u8],
    ) -> Result<TsaCmsVerification, TsaCmsVerifyError> {
        use base64::{engine::general_purpose::STANDARD, Engine as _};
        use openssl::pkcs7::{Pkcs7, Pkcs7Flags};
        use openssl::stack::Stack;
        use openssl::x509::{store::X509StoreBuilder, X509};

        if self.token.is_empty() {
            return Err(TsaCmsVerifyError::TokenMissing);
        }

        let der = STANDARD
            .decode(self.token.as_bytes())
            .map_err(|e| TsaCmsVerifyError::TokenBase64(e.to_string()))?;
        let extracted_timestamp = extract_generalized_time_rfc3339(&der);

        let pkcs7 =
            Pkcs7::from_der(&der).map_err(|e| TsaCmsVerifyError::Pkcs7Parse(e.to_string()))?;

        let certs = X509::stack_from_pem(trust_store_pem)
            .map_err(|e| TsaCmsVerifyError::TrustStore(e.to_string()))?;
        let mut store_builder =
            X509StoreBuilder::new().map_err(|e| TsaCmsVerifyError::TrustStore(e.to_string()))?;
        for cert in certs {
            store_builder
                .add_cert(cert)
                .map_err(|e| TsaCmsVerifyError::TrustStore(e.to_string()))?;
        }
        let store = store_builder.build();

        // NOTE(review): the auxiliary cert stack is empty, so verification
        // presumably relies on certificates embedded in the token itself —
        // confirm against the TSA's token contents / desired Pkcs7Flags.
        let cert_stack: Stack<X509> =
            Stack::new().map_err(|e| TsaCmsVerifyError::TrustStore(e.to_string()))?;
        let mut out = Vec::<u8>::new();
        pkcs7
            .verify(
                &cert_stack,
                &store,
                None,
                Some(&mut out),
                Pkcs7Flags::empty(),
            )
            .map_err(|e| TsaCmsVerifyError::Verify(e.to_string()))?;

        Ok(TsaCmsVerification {
            verified: true,
            extracted_timestamp,
        })
    }

    /// Stub used when the crate is compiled without `tsa-cms-openssl`;
    /// always reports the backend as unavailable.
    #[cfg(not(feature = "tsa-cms-openssl"))]
    pub fn verify_cms_signature_with_pem_roots(
        &self,
        _trust_store_pem: &[u8],
    ) -> Result<TsaCmsVerification, TsaCmsVerifyError> {
        Err(TsaCmsVerifyError::BackendUnavailable(
            "immutable-logging compiled without feature `tsa-cms-openssl`".to_string(),
        ))
    }
}
499
/// Paths written by `PublicationService::publish_to_filesystem`.
#[derive(Debug, Clone)]
pub struct FilesystemPublication {
    /// Canonical JSON file.
    pub json_path: PathBuf,
    /// Gzip copy, when one was requested.
    pub gzip_path: Option<PathBuf>,
}
506
/// Internal request payload for one TSA timestamp exchange.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct TsaRequest {
    /// Hex digest to be timestamped.
    hash: String,
    /// Digest algorithm label ("SHA256").
    algorithm: String,
    /// Caller-chosen nonce text (hashed into the DER nonce INTEGER).
    nonce: String,
}
514
/// Internal result of one TSA timestamp exchange.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct TsaResponse {
    /// RFC 3339 timestamp (best-effort).
    timestamp: String,
    /// Base64 DER token (or a mock marker).
    token: String,
    /// TSA certificate material; currently only "placeholder"/"unparsed".
    tsa_certificate: String,
}
522
/// Errors produced while obtaining an RFC 3161 timestamp.
#[derive(Debug, thiserror::Error)]
pub enum TsaError {
    /// HTTP/transport failure from `reqwest`.
    #[error("Network error: {0}")]
    Network(#[from] reqwest::Error),

    #[error("Encoding error: {0}")]
    Encoding(String),

    #[error("TSA server error: {0}")]
    Server(String),

    #[error("Unsupported TSA URL scheme: {0}")]
    UnsupportedScheme(String),

    #[error("Invalid response from TSA")]
    InvalidResponse,
}
541
/// Encode bytes with the standard (padded) base64 alphabet.
fn base64_encode(data: &[u8]) -> String {
    use base64::{engine::general_purpose::STANDARD, Engine as _};
    STANDARD.encode(data)
}
547
/// Decode a hex string into bytes.
///
/// Returns `Err` for odd-length input or any non-hex character. Works on raw
/// bytes rather than `str` range slices, so multi-byte UTF-8 input now yields
/// an error instead of panicking on a char-boundary slice (the previous
/// `&s[i..i + 2]` panicked on e.g. even-length non-ASCII input). It also
/// rejects `+`/`-` signs, which `u8::from_str_radix` silently accepted.
fn hex_decode(s: &str) -> Result<Vec<u8>, String> {
    // Map one ASCII hex digit to its value.
    fn nibble(b: u8) -> Result<u8, String> {
        match b {
            b'0'..=b'9' => Ok(b - b'0'),
            b'a'..=b'f' => Ok(b - b'a' + 10),
            b'A'..=b'F' => Ok(b - b'A' + 10),
            _ => Err("Invalid hex".to_string()),
        }
    }

    let bytes = s.as_bytes();
    if bytes.len() % 2 != 0 {
        return Err("Invalid hex length".to_string());
    }
    bytes
        .chunks_exact(2)
        .map(|pair| Ok((nibble(pair[0])? << 4) | nibble(pair[1])?))
        .collect()
}
557
558fn build_rfc3161_timestamp_query(
559 message_digest: &[u8],
560 nonce_text: &str,
561) -> Result<Vec<u8>, TsaError> {
562 if message_digest.len() != 32 {
564 return Err(TsaError::Encoding(format!(
565 "expected SHA-256 digest (32 bytes), got {}",
566 message_digest.len()
567 )));
568 }
569
570 let nonce_hash = sha2::Sha256::digest(nonce_text.as_bytes());
571 let nonce = der_integer_positive(&nonce_hash[..16]);
572
573 let algorithm_identifier = der_sequence(&[
574 der_oid(&[2, 16, 840, 1, 101, 3, 4, 2, 1]), der_null(),
576 ]);
577 let message_imprint = der_sequence(&[algorithm_identifier, der_octet_string(message_digest)]);
578
579 Ok(der_sequence(&[
580 der_integer_u64(1), message_imprint,
582 nonce, der_boolean(true), ]))
585}
586
/// Minimal parse of an RFC 3161 `TimeStampResp`.
struct ParsedTsaResponse {
    /// PKIStatus value (0 = granted, 1 = grantedWithMods).
    status: i64,
    /// Raw DER of the `timeStampToken`, when the response includes one.
    time_stamp_token_der: Option<Vec<u8>>,
}
591
592fn parse_timestamp_response(bytes: &[u8]) -> Result<ParsedTsaResponse, TsaError> {
593 let (outer_tag, outer_len, outer_hdr) = der_read_tlv(bytes, 0)?;
594 if outer_tag != 0x30 || outer_hdr + outer_len > bytes.len() {
595 return Err(TsaError::InvalidResponse);
596 }
597 let outer = &bytes[outer_hdr..outer_hdr + outer_len];
598
599 let (status_tag, status_len, status_hdr) = der_read_tlv(outer, 0)?;
600 if status_tag != 0x30 || status_hdr + status_len > outer.len() {
601 return Err(TsaError::InvalidResponse);
602 }
603 let status_seq = &outer[status_hdr..status_hdr + status_len];
604 let (int_tag, int_len, int_hdr) = der_read_tlv(status_seq, 0)?;
605 if int_tag != 0x02 || int_hdr + int_len > status_seq.len() {
606 return Err(TsaError::InvalidResponse);
607 }
608 let status = der_parse_integer_i64(&status_seq[int_hdr..int_hdr + int_len])?;
609
610 let next = status_hdr + status_len;
611 let time_stamp_token_der = if next < outer.len() {
612 let (_tag, len, hdr) = der_read_tlv(outer, next)?;
613 Some(outer[next..next + hdr + len].to_vec())
614 } else {
615 None
616 };
617
618 Ok(ParsedTsaResponse {
619 status,
620 time_stamp_token_der,
621 })
622}
623
624fn extract_generalized_time_rfc3339(bytes: &[u8]) -> Option<String> {
625 let mut i = 0usize;
626 while i + 2 <= bytes.len() {
627 if bytes[i] == 0x18 {
628 let (tag, len, hdr) = der_read_tlv(bytes, i).ok()?;
629 if tag != 0x18 || i + hdr + len > bytes.len() {
630 return None;
631 }
632 let s = std::str::from_utf8(&bytes[i + hdr..i + hdr + len]).ok()?;
633 if let Some(trimmed) = s.strip_suffix('Z') {
634 if let Ok(naive) = chrono::NaiveDateTime::parse_from_str(trimmed, "%Y%m%d%H%M%S") {
635 let dt = chrono::DateTime::<Utc>::from_naive_utc_and_offset(naive, Utc);
636 return Some(dt.to_rfc3339());
637 }
638 }
639 }
640 i += 1;
641 }
642 None
643}
644
645fn parse_http_date_to_rfc3339(value: String) -> Option<String> {
646 let dt = chrono::DateTime::parse_from_rfc2822(&value).ok()?;
647 Some(dt.with_timezone(&Utc).to_rfc3339())
648}
649
650fn der_read_tlv(input: &[u8], offset: usize) -> Result<(u8, usize, usize), TsaError> {
651 if offset + 2 > input.len() {
652 return Err(TsaError::InvalidResponse);
653 }
654 let tag = input[offset];
655 let first_len = input[offset + 1];
656 if first_len & 0x80 == 0 {
657 let len = first_len as usize;
658 Ok((tag, len, 2))
659 } else {
660 let n = (first_len & 0x7f) as usize;
661 if n == 0 || n > 4 || offset + 2 + n > input.len() {
662 return Err(TsaError::InvalidResponse);
663 }
664 let mut len = 0usize;
665 for b in &input[offset + 2..offset + 2 + n] {
666 len = (len << 8) | (*b as usize);
667 }
668 Ok((tag, len, 2 + n))
669 }
670}
671
672fn der_parse_integer_i64(bytes: &[u8]) -> Result<i64, TsaError> {
673 if bytes.is_empty() || bytes.len() > 8 {
674 return Err(TsaError::InvalidResponse);
675 }
676 let mut v: i64 = 0;
677 for b in bytes {
678 v = (v << 8) | (*b as i64);
679 }
680 Ok(v)
681}
682
/// Encode a DER length: short form (one octet) below 128, otherwise long form
/// with `0x80 | count` followed by the big-endian length bytes.
fn der_len(len: usize) -> Vec<u8> {
    if len < 0x80 {
        return vec![len as u8];
    }
    let be = len.to_be_bytes();
    // len >= 128, so at least one byte is non-zero; the fallback is unreachable.
    let first_nonzero = be.iter().position(|&b| b != 0).unwrap_or(be.len() - 1);
    let digits = &be[first_nonzero..];
    let mut out = Vec::with_capacity(1 + digits.len());
    out.push(0x80 | digits.len() as u8);
    out.extend_from_slice(digits);
    out
}
698
699fn der_wrap(tag: u8, value: &[u8]) -> Vec<u8> {
700 let mut out = vec![tag];
701 out.extend(der_len(value.len()));
702 out.extend(value);
703 out
704}
705
706fn der_sequence(parts: &[Vec<u8>]) -> Vec<u8> {
707 let mut content = Vec::new();
708 for part in parts {
709 content.extend(part);
710 }
711 der_wrap(0x30, &content)
712}
713
/// DER NULL: tag 0x05 with zero-length content.
fn der_null() -> Vec<u8> {
    [0x05, 0x00].to_vec()
}
717
/// DER BOOLEAN: tag 0x01, one content byte (0xFF for true, 0x00 for false).
fn der_boolean(v: bool) -> Vec<u8> {
    let content = if v { 0xff } else { 0x00 };
    vec![0x01, 0x01, content]
}
721
722fn der_integer_u64(v: u64) -> Vec<u8> {
723 let mut bytes = if v == 0 {
724 vec![0]
725 } else {
726 let mut tmp = Vec::new();
727 let mut n = v;
728 while n > 0 {
729 tmp.push((n & 0xff) as u8);
730 n >>= 8;
731 }
732 tmp.reverse();
733 tmp
734 };
735 if bytes[0] & 0x80 != 0 {
736 bytes.insert(0, 0);
737 }
738 der_wrap(0x02, &bytes)
739}
740
741fn der_integer_positive(bytes: &[u8]) -> Vec<u8> {
742 let mut v = bytes.to_vec();
743 while v.first() == Some(&0) && v.len() > 1 {
744 v.remove(0);
745 }
746 if v.first().map(|b| b & 0x80 != 0).unwrap_or(false) {
747 v.insert(0, 0);
748 }
749 der_wrap(0x02, &v)
750}
751
/// DER OCTET STRING (tag 0x04) wrapping `bytes`.
fn der_octet_string(bytes: &[u8]) -> Vec<u8> {
    der_wrap(0x04, bytes)
}
755
756fn der_oid(oid: &[u32]) -> Vec<u8> {
757 let mut out = Vec::new();
758 if oid.len() < 2 {
759 return der_wrap(0x06, &out);
760 }
761 out.push((oid[0] * 40 + oid[1]) as u8);
762 for &arc in &oid[2..] {
763 let mut stack = [0u8; 5];
764 let mut idx = stack.len();
765 let mut n = arc;
766 stack[idx - 1] = (n & 0x7f) as u8;
767 idx -= 1;
768 n >>= 7;
769 while n > 0 {
770 stack[idx - 1] = 0x80 | ((n & 0x7f) as u8);
771 idx -= 1;
772 n >>= 7;
773 }
774 out.extend(&stack[idx..]);
775 }
776 der_wrap(0x06, &out)
777}
778
// Unit tests covering the publication chain, canonical JSON export, the mock
// and real TSA paths, DER query/response helpers, and filesystem publishing.
#[cfg(test)]
mod tests {
    use super::*;
    use base64::{engine::general_purpose::STANDARD, Engine as _};
    use std::io::Read;
    use tempfile::tempdir;

    // Signing day N must record its root so day N+1 chains to it.
    #[test]
    fn test_daily_publication_and_signature_chain() {
        let mut service = PublicationService::new();
        let hourly_roots = vec!["a".repeat(64), "b".repeat(64)];

        let mut day1 = service.create_daily_publication(&hourly_roots, 42);
        assert_eq!(day1.entry_count, 42);
        assert_eq!(day1.hourly_roots.len(), 2);
        assert_eq!(day1.previous_day_root, "0".repeat(64));
        assert!(day1.signature.is_none());

        service.sign_publication(&mut day1, b"sig");
        let sig = day1.signature.as_ref().expect("signature set");
        assert_eq!(sig.algorithm, "RSA-PSS-SHA256");
        assert_eq!(sig.value, STANDARD.encode(b"sig"));

        let day2 = service.create_daily_publication(&hourly_roots, 1);
        assert_eq!(day2.previous_day_root, day1.root_hash);
    }

    // The mock:// scheme must short-circuit without any network I/O.
    #[test]
    fn test_add_tsa_timestamp_mock_only() {
        let mut service = PublicationService::new();
        let hourly_roots = vec!["c".repeat(64)];
        let mut publication = service.create_daily_publication(&hourly_roots, 1);

        let rt = tokio::runtime::Builder::new_current_thread()
            .build()
            .expect("runtime");

        rt.block_on(async {
            service
                .add_tsa_timestamp(&mut publication, "mock://tsa")
                .await
                .expect("mock TSA works");
        });

        let tsa = publication
            .tsa_timestamp
            .as_ref()
            .expect("tsa timestamp set");
        assert_eq!(tsa.tsa_url, "mock://tsa");
        assert!(tsa.token.starts_with("mock-sha256="));
    }

    // NOTE(review): this makes a real network attempt and relies on
    // tsa.example failing (DNS/connect/HTTP); on failure the publication
    // must remain untouched.
    #[test]
    fn test_add_tsa_timestamp_rejects_non_mock() {
        let mut service = PublicationService::new();
        let hourly_roots = vec!["d".repeat(64)];
        let mut publication = service.create_daily_publication(&hourly_roots, 1);

        let rt = tokio::runtime::Builder::new_current_thread()
            .build()
            .expect("runtime");

        let err = rt.block_on(async {
            service
                .add_tsa_timestamp(&mut publication, "https://tsa.example")
                .await
                .expect_err("network call should fail for placeholder endpoint")
        });

        match err {
            TsaError::Server(_) | TsaError::Network(_) => {}
            other => panic!("unexpected error: {other}"),
        }
        assert!(publication.tsa_timestamp.is_none());
    }

    #[test]
    fn test_build_rfc3161_query_der_contains_sha256_oid() {
        let digest = [0x11u8; 32];
        let req = build_rfc3161_timestamp_query(&digest, "nonce").expect("query");
        // DER encoding of OID 2.16.840.1.101.3.4.2.1 (SHA-256).
        let oid = [
            0x06, 0x09, 0x60, 0x86, 0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x01,
        ];
        assert!(req.windows(oid.len()).any(|w| w == oid));
    }

    #[test]
    fn test_parse_timestamp_response_status_only() {
        // Minimal TimeStampResp: SEQUENCE { SEQUENCE { INTEGER 0 } }, no token.
        let resp = [0x30, 0x05, 0x30, 0x03, 0x02, 0x01, 0x00];
        let parsed = parse_timestamp_response(&resp).expect("parse");
        assert_eq!(parsed.status, 0);
        assert!(parsed.time_stamp_token_der.is_none());
    }

    #[test]
    fn test_extract_generalized_time_best_effort() {
        // GeneralizedTime TLV: tag 0x18, length 15, "YYYYMMDDhhmmssZ".
        let mut bytes = vec![0x18, 0x0f];
        bytes.extend_from_slice(b"20260226083045Z");
        let ts = extract_generalized_time_rfc3339(&bytes).expect("timestamp");
        assert!(ts.starts_with("2026-02-26T08:30:45"));
    }

    #[test]
    fn test_canonical_json_export_is_deterministic() {
        let service = PublicationService::new();
        let publication = service.create_daily_publication(&["e".repeat(64)], 7);

        let json1 = publication.to_canonical_json().expect("json1");
        let json2 = publication.to_canonical_json().expect("json2");

        assert_eq!(json1, json2);
        assert!(!json1.contains('\n'));
        assert!(json1.contains("\"entry_count\":7"));
        assert!(json1.contains("\"hourly_roots\""));
    }

    #[test]
    fn test_canonical_json_gzip_roundtrip() {
        let service = PublicationService::new();
        let publication = service.create_daily_publication(&["f".repeat(64)], 3);

        let original = publication.to_canonical_json_bytes().expect("original");
        let compressed = publication.to_canonical_json_gzip().expect("gzip");
        assert!(!compressed.is_empty());

        let mut decoder = flate2::read::GzDecoder::new(compressed.as_slice());
        let mut decompressed = Vec::new();
        decoder.read_to_end(&mut decompressed).expect("decompress");

        assert_eq!(decompressed, original);
    }

    #[test]
    fn test_publication_basename_is_stable() {
        let service = PublicationService::new();
        let publication = service.create_daily_publication(&["bb".repeat(32)], 1);
        let base = publication.publication_basename();

        assert!(base.starts_with("daily-publication-"));
        assert!(base.contains(&publication.date));
        assert!(base.ends_with(&publication.root_hash[..16]));
    }

    #[test]
    fn test_verify_root_hash_detects_tamper() {
        let service = PublicationService::new();
        let mut publication =
            service.create_daily_publication(&["aa".repeat(32), "bb".repeat(32)], 2);
        assert!(publication.verify_root_hash());

        // Adding a leaf changes the recomputed root; verification must fail.
        publication.hourly_roots.push("cc".repeat(32));
        assert!(!publication.verify_root_hash());
    }

    #[test]
    fn test_tsa_token_inspection() {
        // Token containing a valid GeneralizedTime TLV.
        let tsa = TsaTimestamp {
            tsa_url: "https://tsa.example".to_string(),
            timestamp: "2026-02-26T00:00:00Z".to_string(),
            token: base64_encode(&[
                0x18, 0x0f, b'2', b'0', b'2', b'6', b'0', b'2', b'2', b'6', b'0', b'8', b'3', b'0',
                b'4', b'5', b'Z',
            ]),
        };
        let inspected = tsa.inspect_token();
        assert!(inspected.token_present);
        assert!(inspected.token_base64_valid);
        assert!(inspected.token_der_nonempty);
        assert!(inspected.extracted_timestamp.is_some());

        // "%%%" is not valid base64.
        let bad = TsaTimestamp {
            tsa_url: "https://tsa.example".to_string(),
            timestamp: "2026-02-26T00:00:00Z".to_string(),
            token: "%%%".to_string(),
        };
        let bad_inspected = bad.inspect_token();
        assert!(bad_inspected.token_present);
        assert!(!bad_inspected.token_base64_valid);
    }

    #[cfg(feature = "tsa-cms-openssl")]
    #[test]
    fn test_tsa_cms_verify_rejects_invalid_base64() {
        let tsa = TsaTimestamp {
            tsa_url: "https://tsa.example".to_string(),
            timestamp: "2026-02-26T00:00:00Z".to_string(),
            token: "%%%".to_string(),
        };

        let err = tsa
            .verify_cms_signature_with_pem_roots(b"")
            .expect_err("invalid base64 must fail");
        match err {
            TsaCmsVerifyError::TokenBase64(_) => {}
            other => panic!("unexpected error: {other}"),
        }
    }

    #[cfg(feature = "tsa-cms-openssl")]
    #[test]
    fn test_tsa_cms_verify_rejects_non_pkcs7_der() {
        // Valid DER (INTEGER 0 in a SEQUENCE) but not a PKCS#7 structure.
        let tsa = TsaTimestamp {
            tsa_url: "https://tsa.example".to_string(),
            timestamp: "2026-02-26T00:00:00Z".to_string(),
            token: base64_encode(&[0x30, 0x03, 0x02, 0x01, 0x00]),
        };

        let err = tsa
            .verify_cms_signature_with_pem_roots(b"")
            .expect_err("non-pkcs7 der must fail");
        match err {
            TsaCmsVerifyError::Pkcs7Parse(_) | TsaCmsVerifyError::TrustStore(_) => {}
            other => panic!("unexpected error: {other}"),
        }
    }

    #[cfg(not(feature = "tsa-cms-openssl"))]
    #[test]
    fn test_tsa_cms_verify_reports_backend_unavailable_without_feature() {
        let tsa = TsaTimestamp {
            tsa_url: "https://tsa.example".to_string(),
            timestamp: "2026-02-26T00:00:00Z".to_string(),
            token: "%%%".to_string(),
        };

        let err = tsa
            .verify_cms_signature_with_pem_roots(b"")
            .expect_err("backend should be unavailable without feature");
        match err {
            TsaCmsVerifyError::BackendUnavailable(_) => {}
            other => panic!("unexpected error: {other}"),
        }
    }

    #[test]
    fn test_publish_to_filesystem_writes_json_and_gzip() {
        let tmp = tempdir().expect("tempdir");
        let service = PublicationService::new();
        let publication = service.create_daily_publication(&["aa".repeat(32)], 11);

        let written = service
            .publish_to_filesystem(&publication, tmp.path(), true)
            .expect("publish");

        assert!(written.json_path.exists());
        let gzip_path = written.gzip_path.as_ref().expect("gzip path");
        assert!(gzip_path.exists());

        // The JSON file must match the canonical bytes exactly...
        let json_bytes = std::fs::read(&written.json_path).expect("json bytes");
        assert_eq!(
            json_bytes,
            publication
                .to_canonical_json_bytes()
                .expect("canonical json")
        );

        // ...and the gzip file must decompress back to the same bytes.
        let gz_bytes = std::fs::read(gzip_path).expect("gzip bytes");
        let mut decoder = flate2::read::GzDecoder::new(gz_bytes.as_slice());
        let mut out = Vec::new();
        decoder.read_to_end(&mut out).expect("decompress");
        assert_eq!(out, json_bytes);
    }
}