Skip to main content

immutable_logging/
publication.rs

1//! Publication - Daily audit publication
2
3use chrono::Utc;
4use serde::{Deserialize, Serialize};
5use serde_json::Value;
6use sha2::Digest as _;
7use std::io::Write;
8use std::path::{Path, PathBuf};
9
/// Daily audit publication.
///
/// Aggregates one day's hourly Merkle roots into a single `root_hash`,
/// chained to the prior day via `previous_day_root` (an all-zero hash for
/// the first day). May carry a detached signature and a TSA timestamp.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DailyPublication {
    /// Publication date (UTC, `YYYY-MM-DD`)
    pub date: String,
    /// Root hash of all hourly roots (hex-encoded SHA-256 Merkle root)
    pub root_hash: String,
    /// Total entry count
    pub entry_count: u64,
    /// Hourly root hashes
    pub hourly_roots: Vec<String>,
    /// Previous day root (for chaining; 64 hex zeros on the first day)
    pub previous_day_root: String,
    /// Creation timestamp (RFC 3339)
    pub created_at: String,
    /// Signature (set by `PublicationService::sign_publication*`)
    pub signature: Option<PublicationSignature>,
    /// TSA timestamp (set by `PublicationService::add_tsa_timestamp`)
    pub tsa_timestamp: Option<TsaTimestamp>,
}
30
31impl DailyPublication {
32    /// Export as canonical deterministic JSON bytes.
33    pub fn to_canonical_json_bytes(&self) -> Result<Vec<u8>, crate::error::LogError> {
34        let canonical = canonical_publication_json_value(self)?;
35        canonical_json_bytes(&canonical)
36    }
37
38    /// Export as compact deterministic JSON string.
39    pub fn to_canonical_json(&self) -> Result<String, crate::error::LogError> {
40        let bytes = self.to_canonical_json_bytes()?;
41        String::from_utf8(bytes)
42            .map_err(|e| crate::error::LogError::SerializationError(e.to_string()))
43    }
44
45    /// Export as gzip-compressed canonical JSON.
46    pub fn to_canonical_json_gzip(&self) -> Result<Vec<u8>, crate::error::LogError> {
47        let json = self.to_canonical_json_bytes()?;
48        let mut encoder = flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::default());
49        encoder
50            .write_all(&json)
51            .map_err(|e| crate::error::LogError::SerializationError(e.to_string()))?;
52        encoder
53            .finish()
54            .map_err(|e| crate::error::LogError::SerializationError(e.to_string()))
55    }
56
57    /// Build a deterministic basename suitable for filesystem/object publication backends.
58    pub fn publication_basename(&self) -> String {
59        let root_prefix = self.root_hash.get(..16).unwrap_or(&self.root_hash);
60        format!("daily-publication-{}-{}", self.date, root_prefix)
61    }
62
63    /// Recompute the publication root from `hourly_roots`.
64    pub fn recompute_root_hash(&self) -> String {
65        PublicationService::compute_merkle_root(&self.hourly_roots)
66    }
67
68    /// Check whether the stored `root_hash` matches the recomputed value.
69    pub fn verify_root_hash(&self) -> bool {
70        self.root_hash == self.recompute_root_hash()
71    }
72
73    /// Write canonical JSON to a file path.
74    pub fn write_canonical_json_file<P: AsRef<Path>>(
75        &self,
76        path: P,
77    ) -> Result<(), crate::error::LogError> {
78        let bytes = self.to_canonical_json_bytes()?;
79        std::fs::write(path, bytes)
80            .map_err(|e| crate::error::LogError::PublicationError(e.to_string()))
81    }
82
83    /// Write gzip-compressed canonical JSON to a file path.
84    pub fn write_canonical_json_gzip_file<P: AsRef<Path>>(
85        &self,
86        path: P,
87    ) -> Result<(), crate::error::LogError> {
88        let bytes = self.to_canonical_json_gzip()?;
89        std::fs::write(path, bytes)
90            .map_err(|e| crate::error::LogError::PublicationError(e.to_string()))
91    }
92}
93
/// Publication signature.
///
/// Detached signature metadata stored alongside a publication.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PublicationSignature {
    /// Signature algorithm label (e.g. "RSA-PSS-SHA256")
    pub algorithm: String,
    /// Identifier of the signing key
    pub key_id: String,
    /// Base64-encoded signature bytes
    pub value: String,
}
101
/// TSA timestamp.
///
/// RFC 3161-style timestamp metadata attached to a publication.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TsaTimestamp {
    /// URL of the TSA endpoint that issued the token
    pub tsa_url: String,
    /// Extracted/derived timestamp (RFC 3339)
    pub timestamp: String,
    /// Base64-encoded DER timeStampToken (or a `mock-sha256=` marker from the mock provider)
    pub token: String,
}
109
/// Best-effort inspection result for a stored TSA token.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TsaTokenInspection {
    /// Token string is non-empty
    pub token_present: bool,
    /// Token decoded cleanly as standard base64
    pub token_base64_valid: bool,
    /// Decoded DER payload has at least one byte
    pub token_der_nonempty: bool,
    /// GeneralizedTime found in the DER, converted to RFC 3339 (if any)
    pub extracted_timestamp: Option<String>,
}
118
/// Cryptographic CMS/PKCS#7 verification result for a TSA token.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TsaCmsVerification {
    /// CMS signature and certificate chain verified against the supplied trust store
    pub verified: bool,
    /// GeneralizedTime extracted from the token DER (best-effort scan)
    pub extracted_timestamp: Option<String>,
}
125
/// TSA token CMS verification error.
#[derive(Debug, thiserror::Error)]
pub enum TsaCmsVerifyError {
    /// Crate built without the `tsa-cms-openssl` feature.
    #[error("TSA CMS verification backend unavailable: {0}")]
    BackendUnavailable(String),
    /// Stored token string is empty.
    #[error("TSA token missing")]
    TokenMissing,
    /// Token is not valid standard base64.
    #[error("TSA token base64 decode failed: {0}")]
    TokenBase64(String),
    /// Decoded DER is not a parsable PKCS#7 structure.
    #[error("TSA token PKCS#7 parse failed: {0}")]
    Pkcs7Parse(String),
    /// Trust store PEM could not be loaded or built.
    #[error("TSA trust store error: {0}")]
    TrustStore(String),
    /// CMS signature / chain verification failed.
    #[error("TSA CMS verification failed: {0}")]
    Verify(String),
}
142
/// Publication service.
///
/// Stateful builder for daily publications; remembers the last *signed*
/// day's root so consecutive days form a hash chain.
pub struct PublicationService {
    /// Previous day root (set by `sign_publication*`; `None` before any signing)
    previous_day_root: Option<String>,
}
148
impl Default for PublicationService {
    fn default() -> Self {
        // Delegate to `new` so default construction stays in one place.
        Self::new()
    }
}
154
impl PublicationService {
    /// Create new publication service.
    ///
    /// Starts with no previous-day root; the first publication created will
    /// chain to an all-zero root.
    pub fn new() -> Self {
        PublicationService {
            previous_day_root: None,
        }
    }

    /// Create daily publication.
    ///
    /// Builds a `DailyPublication` for today's UTC date whose `root_hash`
    /// is the Merkle root of `hourly_roots`. Chains to the root stored by
    /// the most recent `sign_publication*` call, falling back to an all-zero
    /// hash when no prior day has been signed. `signature` and
    /// `tsa_timestamp` are left unset.
    pub fn create_daily_publication(
        &self,
        hourly_roots: &[String],
        entry_count: u64,
    ) -> DailyPublication {
        let date = Utc::now().format("%Y-%m-%d").to_string();
        // Genesis chaining value: 64 hex zeros (SHA-256-sized).
        let previous = self.previous_day_root.clone().unwrap_or_else(|| {
            "0000000000000000000000000000000000000000000000000000000000000000".to_string()
        });

        // Compute root hash of all hourly roots
        let root_hash = Self::compute_merkle_root(hourly_roots);

        DailyPublication {
            date,
            root_hash,
            entry_count,
            hourly_roots: hourly_roots.to_vec(),
            previous_day_root: previous,
            created_at: Utc::now().to_rfc3339(),
            signature: None,
            tsa_timestamp: None,
        }
    }

    /// Compute merkle root from list of hashes.
    ///
    /// Leaves are hashed with a 0x00 domain-separation prefix (see
    /// `merkle_leaf_hash`); interior nodes with 0x01. An unpaired node at
    /// any level is paired with itself. Returns 64 hex zeros for empty
    /// input. Result is hex-encoded SHA-256.
    fn compute_merkle_root(hashes: &[String]) -> String {
        if hashes.is_empty() {
            return "0000000000000000000000000000000000000000000000000000000000000000".to_string();
        }

        use sha2::{Digest, Sha256};

        // Level 0: domain-separated leaf hashes.
        let mut current: Vec<Vec<u8>> = hashes.iter().map(|h| merkle_leaf_hash(h)).collect();

        // Repeatedly combine pairs until a single root remains.
        while current.len() > 1 {
            let mut next = Vec::new();

            for chunk in current.chunks(2) {
                let left = &chunk[0];
                // Odd node out: duplicate it as its own sibling.
                let right = if chunk.len() == 2 {
                    &chunk[1]
                } else {
                    &chunk[0]
                };
                let mut hasher = Sha256::new();
                // 0x01 prefix marks an interior node (vs. 0x00 for leaves).
                hasher.update([0x01]);
                hasher.update(left);
                hasher.update(right);
                next.push(hasher.finalize().to_vec());
            }

            current = next;
        }

        hex_encode(&current[0])
    }

    /// Sign publication.
    ///
    /// Convenience wrapper using the service's default algorithm label and
    /// key id. Also records `root_hash` as the chaining value for the next
    /// day (see `sign_publication_with_metadata`).
    pub fn sign_publication(&mut self, publication: &mut DailyPublication, signature: &[u8]) {
        self.sign_publication_with_metadata(
            publication,
            signature,
            "RSA-PSS-SHA256",
            "rnbc-audit-sig-2026",
        );
    }

    /// Sign publication with explicit metadata (useful for API-driven integrations).
    ///
    /// Attaches the base64-encoded `signature` bytes to the publication and
    /// — as a side effect — stores the publication's root as
    /// `previous_day_root`, so only *signed* days participate in the chain.
    pub fn sign_publication_with_metadata(
        &mut self,
        publication: &mut DailyPublication,
        signature: &[u8],
        algorithm: &str,
        key_id: &str,
    ) {
        publication.signature = Some(PublicationSignature {
            algorithm: algorithm.to_string(),
            key_id: key_id.to_string(),
            value: base64_encode(signature),
        });

        // Store previous day root for chaining
        self.previous_day_root = Some(publication.root_hash.clone());
    }

    /// Publish to a local filesystem directory (precursor to WORM/object storage backends).
    ///
    /// Creates `directory` if needed, writes `<basename>.json` (canonical
    /// JSON) and optionally `<basename>.json.gz`, and returns the paths.
    pub fn publish_to_filesystem<P: AsRef<Path>>(
        &self,
        publication: &DailyPublication,
        directory: P,
        write_gzip: bool,
    ) -> Result<FilesystemPublication, crate::error::LogError> {
        let dir = directory.as_ref();
        std::fs::create_dir_all(dir)
            .map_err(|e| crate::error::LogError::PublicationError(e.to_string()))?;

        let basename = publication.publication_basename();
        let json_path = dir.join(format!("{basename}.json"));
        publication.write_canonical_json_file(&json_path)?;

        let gzip_path = if write_gzip {
            let path = dir.join(format!("{basename}.json.gz"));
            publication.write_canonical_json_gzip_file(&path)?;
            Some(path)
        } else {
            None
        };

        Ok(FilesystemPublication {
            json_path,
            gzip_path,
        })
    }

    /// Add TSA timestamp metadata.
    ///
    /// `mock://` URLs are supported for local testing.
    ///
    /// `http(s)://` URLs use an experimental RFC 3161 request path that retrieves
    /// and stores the TSA token, but does not yet perform full CMS/token validation.
    ///
    /// On any error the publication is left unchanged (`tsa_timestamp`
    /// is only set after a successful response).
    pub async fn add_tsa_timestamp(
        &mut self,
        publication: &mut DailyPublication,
        tsa_url: &str,
    ) -> Result<(), TsaError> {
        // Serialize publication hash for TSA request
        let hash_to_timestamp = &publication.root_hash;

        // In production, this would be a proper RFC 3161 request
        // For now, we'll implement a basic timestamp request structure
        let timestamp_request = TsaRequest {
            hash: hash_to_timestamp.clone(),
            algorithm: "SHA256".to_string(),
            // Fresh random nonce text per request; hashed into the DER nonce.
            nonce: uuid::Uuid::new_v4().to_string(),
        };

        // Make request to TSA (in production, use actual TSA server)
        let response = self.request_timestamp(tsa_url, &timestamp_request).await?;

        publication.tsa_timestamp = Some(TsaTimestamp {
            tsa_url: tsa_url.to_string(),
            timestamp: response.timestamp,
            token: response.token,
        });

        tracing::info!(
            "TSA timestamp added for publication {} at {}",
            publication.date,
            publication
                .tsa_timestamp
                .as_ref()
                .map(|t| t.timestamp.as_str())
                .map_or("unknown", |v| v)
        );

        Ok(())
    }

    /// Request timestamp from TSA server.
    ///
    /// Supports:
    /// - `mock://...` for tests
    /// - `http(s)://...` experimental RFC 3161 transport (token retrieval only)
    async fn request_timestamp(
        &self,
        tsa_url: &str,
        request: &TsaRequest,
    ) -> Result<TsaResponse, TsaError> {
        // Mock provider: deterministic token derived from the digest, no network.
        if tsa_url.starts_with("mock://") {
            tracing::warn!("Using mock TSA timestamp provider: {}", tsa_url);
            return Ok(TsaResponse {
                timestamp: chrono::Utc::now().to_rfc3339(),
                token: format!("mock-sha256={}", request.hash),
                tsa_certificate: "placeholder".to_string(),
            });
        }

        if !(tsa_url.starts_with("https://") || tsa_url.starts_with("http://")) {
            return Err(TsaError::UnsupportedScheme(tsa_url.to_string()));
        }

        // The hex digest must decode to raw bytes for the DER request body.
        let digest_bytes = hex_decode(&request.hash).map_err(TsaError::Encoding)?;
        let body = build_rfc3161_timestamp_query(&digest_bytes, &request.nonce)?;

        tracing::info!("Requesting TSA token from {}", tsa_url);
        let client = reqwest::Client::new();
        let resp = client
            .post(tsa_url)
            // Standard RFC 3161 HTTP media types.
            .header("Content-Type", "application/timestamp-query")
            .header("Accept", "application/timestamp-reply")
            .body(body)
            .send()
            .await?;

        let status_code = resp.status();
        if !status_code.is_success() {
            return Err(TsaError::Server(format!(
                "HTTP {} from TSA endpoint",
                status_code
            )));
        }

        // Keep the HTTP Date header as a fallback timestamp source.
        let date_header = resp
            .headers()
            .get(reqwest::header::DATE)
            .and_then(|v| v.to_str().ok())
            .map(str::to_string);
        let bytes = resp.bytes().await?;

        let tsa_reply = parse_timestamp_response(&bytes)?;
        // Accept PKIStatus 0 and 1 (granted / granted-with-mods); reject the rest.
        if tsa_reply.status != 0 && tsa_reply.status != 1 {
            return Err(TsaError::Server(format!(
                "TSA rejected request with status {}",
                tsa_reply.status
            )));
        }

        let token_der = tsa_reply
            .time_stamp_token_der
            .ok_or(TsaError::InvalidResponse)?;

        // Best-effort timestamp extraction from token bytes (GeneralizedTime scan).
        // Full CMS/ESS validation is pending.
        let timestamp = extract_generalized_time_rfc3339(&token_der)
            .or_else(|| date_header.and_then(parse_http_date_to_rfc3339))
            .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());

        Ok(TsaResponse {
            timestamp,
            token: base64_encode(&token_der),
            tsa_certificate: "unparsed".to_string(),
        })
    }
}
399
impl TsaTimestamp {
    /// Best-effort validation/inspection of stored TSA token encoding and timestamp extraction.
    ///
    /// This does not perform CMS/PKCS#7 signature validation.
    pub fn inspect_token(&self) -> TsaTokenInspection {
        use base64::{engine::general_purpose::STANDARD, Engine as _};

        // No token stored at all.
        if self.token.is_empty() {
            return TsaTokenInspection {
                token_present: false,
                token_base64_valid: false,
                token_der_nonempty: false,
                extracted_timestamp: None,
            };
        }

        // Token present but not decodable as base64.
        let der = match STANDARD.decode(self.token.as_bytes()) {
            Ok(v) => v,
            Err(_) => {
                return TsaTokenInspection {
                    token_present: true,
                    token_base64_valid: false,
                    token_der_nonempty: false,
                    extracted_timestamp: None,
                };
            }
        };

        // Heuristic GeneralizedTime scan over the decoded DER.
        let extracted_timestamp = extract_generalized_time_rfc3339(&der);
        TsaTokenInspection {
            token_present: true,
            token_base64_valid: true,
            token_der_nonempty: !der.is_empty(),
            extracted_timestamp,
        }
    }

    /// Verify the `timeStampToken` CMS/PKCS#7 signature against trusted PEM certificates.
    ///
    /// This validates CMS signature and certificate chain. RFC3161-specific TSTInfo checks
    /// (message imprint, policy, nonce) are not yet enforced here.
    #[cfg(feature = "tsa-cms-openssl")]
    pub fn verify_cms_signature_with_pem_roots(
        &self,
        trust_store_pem: &[u8],
    ) -> Result<TsaCmsVerification, TsaCmsVerifyError> {
        use base64::{engine::general_purpose::STANDARD, Engine as _};
        use openssl::pkcs7::{Pkcs7, Pkcs7Flags};
        use openssl::stack::Stack;
        use openssl::x509::{store::X509StoreBuilder, X509};

        if self.token.is_empty() {
            return Err(TsaCmsVerifyError::TokenMissing);
        }

        let der = STANDARD
            .decode(self.token.as_bytes())
            .map_err(|e| TsaCmsVerifyError::TokenBase64(e.to_string()))?;
        // Extract a display timestamp up front; independent of verification.
        let extracted_timestamp = extract_generalized_time_rfc3339(&der);

        let pkcs7 =
            Pkcs7::from_der(&der).map_err(|e| TsaCmsVerifyError::Pkcs7Parse(e.to_string()))?;

        // Build an X509 store containing every certificate in the PEM bundle.
        let certs = X509::stack_from_pem(trust_store_pem)
            .map_err(|e| TsaCmsVerifyError::TrustStore(e.to_string()))?;
        let mut store_builder =
            X509StoreBuilder::new().map_err(|e| TsaCmsVerifyError::TrustStore(e.to_string()))?;
        for cert in certs {
            store_builder
                .add_cert(cert)
                .map_err(|e| TsaCmsVerifyError::TrustStore(e.to_string()))?;
        }
        let store = store_builder.build();

        // Empty untrusted-cert stack: signer certs must come from the token
        // itself and chain to the trust store.
        let cert_stack: Stack<X509> =
            Stack::new().map_err(|e| TsaCmsVerifyError::TrustStore(e.to_string()))?;
        let mut out = Vec::<u8>::new();
        pkcs7
            .verify(
                &cert_stack,
                &store,
                None,
                Some(&mut out),
                Pkcs7Flags::empty(),
            )
            .map_err(|e| TsaCmsVerifyError::Verify(e.to_string()))?;

        Ok(TsaCmsVerification {
            verified: true,
            extracted_timestamp,
        })
    }

    /// Stub used when the openssl-backed verifier is compiled out; always
    /// reports the backend as unavailable.
    #[cfg(not(feature = "tsa-cms-openssl"))]
    pub fn verify_cms_signature_with_pem_roots(
        &self,
        _trust_store_pem: &[u8],
    ) -> Result<TsaCmsVerification, TsaCmsVerifyError> {
        Err(TsaCmsVerifyError::BackendUnavailable(
            "immutable-logging compiled without feature `tsa-cms-openssl`".to_string(),
        ))
    }
}
503
/// Files created by a filesystem publication backend.
#[derive(Debug, Clone)]
pub struct FilesystemPublication {
    /// Path of the canonical JSON file
    pub json_path: PathBuf,
    /// Path of the gzip copy, when one was requested
    pub gzip_path: Option<PathBuf>,
}
510
/// TSA Request structure (RFC 3161 subset)
#[derive(Debug, Clone, Serialize, Deserialize)]
struct TsaRequest {
    /// Hex-encoded digest to be timestamped
    hash: String,
    /// Digest algorithm label (always "SHA256" on the current path)
    algorithm: String,
    /// Random nonce text; hashed into the DER nonce field
    nonce: String,
}
518
/// TSA Response structure (RFC 3161 subset)
#[derive(Debug, Clone, Serialize, Deserialize)]
struct TsaResponse {
    /// Timestamp (RFC 3339)
    timestamp: String,
    /// Base64-encoded token (DER timeStampToken, or mock marker)
    token: String,
    /// Certificate field — currently a placeholder, never parsed from real replies
    tsa_certificate: String,
}
526
/// TSA Error type
#[derive(Debug, thiserror::Error)]
pub enum TsaError {
    /// Transport failure from the HTTP client.
    #[error("Network error: {0}")]
    Network(#[from] reqwest::Error),

    /// Hex/DER encoding problem while building the request.
    #[error("Encoding error: {0}")]
    Encoding(String),

    /// Non-success HTTP status or a rejecting PKIStatus from the TSA.
    #[error("TSA server error: {0}")]
    Server(String),

    /// URL scheme other than mock:// or http(s)://.
    #[error("Unsupported TSA URL scheme: {0}")]
    UnsupportedScheme(String),

    /// Malformed or incomplete TimeStampResp.
    #[error("Invalid response from TSA")]
    InvalidResponse,
}
545
546/// Base64 encode
547fn base64_encode(data: &[u8]) -> String {
548    use base64::{engine::general_purpose::STANDARD, Engine as _};
549    STANDARD.encode(data)
550}
551
552fn canonical_publication_json_value(
553    publication: &DailyPublication,
554) -> Result<Value, crate::error::LogError> {
555    let signature = match publication.signature.as_ref() {
556        Some(sig) => serde_json::json!({
557            "algorithm": sig.algorithm,
558            "key_id": sig.key_id,
559            "value": sig.value,
560        }),
561        None => Value::Null,
562    };
563
564    let tsa_timestamp = match publication.tsa_timestamp.as_ref() {
565        Some(tsa) => serde_json::json!({
566            "timestamp": tsa.timestamp,
567            "token": tsa.token,
568            "tsa_url": tsa.tsa_url,
569        }),
570        None => Value::Null,
571    };
572
573    serde_json::from_value::<Value>(serde_json::json!({
574        "schema_version": "rsrp-daily-publication-v1",
575        "created_at": publication.created_at,
576        "date": publication.date,
577        "entry_count": publication.entry_count,
578        "hourly_roots": publication.hourly_roots,
579        "previous_day_root": publication.previous_day_root,
580        "root_hash": publication.root_hash,
581        "signature": signature,
582        "tsa_timestamp": tsa_timestamp,
583    }))
584    .map_err(|e| crate::error::LogError::SerializationError(e.to_string()))
585}
586
587fn canonical_json_bytes(value: &Value) -> Result<Vec<u8>, crate::error::LogError> {
588    let mut out = String::new();
589    write_canonical_json(value, &mut out)?;
590    Ok(out.into_bytes())
591}
592
593fn write_canonical_json(value: &Value, out: &mut String) -> Result<(), crate::error::LogError> {
594    match value {
595        Value::Null => out.push_str("null"),
596        Value::Bool(v) => out.push_str(if *v { "true" } else { "false" }),
597        Value::Number(v) => out.push_str(&v.to_string()),
598        Value::String(v) => {
599            let encoded = serde_json::to_string(v)
600                .map_err(|e| crate::error::LogError::SerializationError(e.to_string()))?;
601            out.push_str(&encoded);
602        }
603        Value::Array(values) => {
604            out.push('[');
605            for (i, entry) in values.iter().enumerate() {
606                if i > 0 {
607                    out.push(',');
608                }
609                write_canonical_json(entry, out)?;
610            }
611            out.push(']');
612        }
613        Value::Object(map) => {
614            let mut keys: Vec<&str> = map.keys().map(|k| k.as_str()).collect();
615            keys.sort_unstable();
616            out.push('{');
617            for (i, key) in keys.iter().enumerate() {
618                if i > 0 {
619                    out.push(',');
620                }
621                let encoded_key = serde_json::to_string(key)
622                    .map_err(|e| crate::error::LogError::SerializationError(e.to_string()))?;
623                out.push_str(&encoded_key);
624                out.push(':');
625                let value = map.get(*key).ok_or_else(|| {
626                    crate::error::LogError::SerializationError(
627                        "Missing canonical JSON key".to_string(),
628                    )
629                })?;
630                write_canonical_json(value, out)?;
631            }
632            out.push('}');
633        }
634    }
635    Ok(())
636}
637
638fn merkle_leaf_hash(input: &str) -> Vec<u8> {
639    let bytes = hex_decode(input).unwrap_or_else(|_| input.as_bytes().to_vec());
640    let mut hasher = sha2::Sha256::new();
641    hasher.update([0x00]);
642    hasher.update(&bytes);
643    hasher.finalize().to_vec()
644}
645
/// Hex-encode bytes as a lowercase string.
fn hex_encode(data: &[u8]) -> String {
    use std::fmt::Write as _;
    // Preallocate two chars per byte and format in place — avoids the
    // per-byte String allocation of `format!` inside a `map`/`collect`.
    let mut out = String::with_capacity(data.len() * 2);
    for byte in data {
        // Writing into a String is infallible.
        let _ = write!(out, "{:02x}", byte);
    }
    out
}
649
/// Decode a hex string (upper- or lowercase) into bytes.
///
/// Returns an error for odd-length input or any non-hex character.
/// Works over raw bytes rather than `&str` slices, so multi-byte UTF-8
/// input is rejected with an error instead of panicking on a
/// char-boundary slice (the previous `&s[i..i + 2]` indexing panicked on
/// e.g. CJK characters).
fn hex_decode(s: &str) -> Result<Vec<u8>, String> {
    if s.len() % 2 != 0 {
        return Err("Invalid hex length".to_string());
    }
    s.as_bytes()
        .chunks_exact(2)
        .map(|pair| {
            // `u8 as char` maps each byte to its Latin-1 code point; any
            // non-ASCII byte fails `to_digit(16)` cleanly.
            let hi = (pair[0] as char).to_digit(16);
            let lo = (pair[1] as char).to_digit(16);
            match (hi, lo) {
                (Some(hi), Some(lo)) => Ok(((hi << 4) | lo) as u8),
                _ => Err("Invalid hex".to_string()),
            }
        })
        .collect()
}
659
660fn build_rfc3161_timestamp_query(
661    message_digest: &[u8],
662    nonce_text: &str,
663) -> Result<Vec<u8>, TsaError> {
664    // We support SHA-256 only in this implementation path.
665    if message_digest.len() != 32 {
666        return Err(TsaError::Encoding(format!(
667            "expected SHA-256 digest (32 bytes), got {}",
668            message_digest.len()
669        )));
670    }
671
672    let nonce_hash = sha2::Sha256::digest(nonce_text.as_bytes());
673    let nonce = der_integer_positive(&nonce_hash[..16]);
674
675    let algorithm_identifier = der_sequence(&[
676        der_oid(&[2, 16, 840, 1, 101, 3, 4, 2, 1]), // sha256
677        der_null(),
678    ]);
679    let message_imprint = der_sequence(&[algorithm_identifier, der_octet_string(message_digest)]);
680
681    Ok(der_sequence(&[
682        der_integer_u64(1), // version v1
683        message_imprint,
684        nonce,             // nonce
685        der_boolean(true), // certReq = TRUE
686    ]))
687}
688
/// Minimal parsed view of a DER `TimeStampResp`.
struct ParsedTsaResponse {
    /// PKIStatus value (callers accept 0 and 1 as success)
    status: i64,
    /// Raw DER of the timeStampToken, when the TSA returned one
    time_stamp_token_der: Option<Vec<u8>>,
}
693
694fn parse_timestamp_response(bytes: &[u8]) -> Result<ParsedTsaResponse, TsaError> {
695    let (outer_tag, outer_len, outer_hdr) = der_read_tlv(bytes, 0)?;
696    if outer_tag != 0x30 || outer_hdr + outer_len > bytes.len() {
697        return Err(TsaError::InvalidResponse);
698    }
699    let outer = &bytes[outer_hdr..outer_hdr + outer_len];
700
701    let (status_tag, status_len, status_hdr) = der_read_tlv(outer, 0)?;
702    if status_tag != 0x30 || status_hdr + status_len > outer.len() {
703        return Err(TsaError::InvalidResponse);
704    }
705    let status_seq = &outer[status_hdr..status_hdr + status_len];
706    let (int_tag, int_len, int_hdr) = der_read_tlv(status_seq, 0)?;
707    if int_tag != 0x02 || int_hdr + int_len > status_seq.len() {
708        return Err(TsaError::InvalidResponse);
709    }
710    let status = der_parse_integer_i64(&status_seq[int_hdr..int_hdr + int_len])?;
711
712    let next = status_hdr + status_len;
713    let time_stamp_token_der = if next < outer.len() {
714        let (_tag, len, hdr) = der_read_tlv(outer, next)?;
715        Some(outer[next..next + hdr + len].to_vec())
716    } else {
717        None
718    };
719
720    Ok(ParsedTsaResponse {
721        status,
722        time_stamp_token_der,
723    })
724}
725
/// Scan DER bytes for the first GeneralizedTime (tag 0x18) value and
/// convert it to RFC 3339.
///
/// Heuristic, not a structural parse: it byte-scans for the tag, so a 0x18
/// byte inside unrelated content can be a false candidate; only the
/// `YYYYMMDDHHMMSSZ` form (no fractional seconds) is accepted.
fn extract_generalized_time_rfc3339(bytes: &[u8]) -> Option<String> {
    let mut i = 0usize;
    while i + 2 <= bytes.len() {
        if bytes[i] == 0x18 {
            let (tag, len, hdr) = der_read_tlv(bytes, i).ok()?;
            // Give up entirely if the TLV would run past the buffer.
            if tag != 0x18 || i + hdr + len > bytes.len() {
                return None;
            }
            let s = std::str::from_utf8(&bytes[i + hdr..i + hdr + len]).ok()?;
            // Interpret `...Z` as UTC with no sub-second component.
            if let Some(trimmed) = s.strip_suffix('Z') {
                if let Ok(naive) = chrono::NaiveDateTime::parse_from_str(trimmed, "%Y%m%d%H%M%S") {
                    let dt = chrono::DateTime::<Utc>::from_naive_utc_and_offset(naive, Utc);
                    return Some(dt.to_rfc3339());
                }
            }
        }
        // Advance byte-by-byte — candidates may start anywhere.
        i += 1;
    }
    None
}
746
747fn parse_http_date_to_rfc3339(value: String) -> Option<String> {
748    let dt = chrono::DateTime::parse_from_rfc2822(&value).ok()?;
749    Some(dt.with_timezone(&Utc).to_rfc3339())
750}
751
752fn der_read_tlv(input: &[u8], offset: usize) -> Result<(u8, usize, usize), TsaError> {
753    if offset + 2 > input.len() {
754        return Err(TsaError::InvalidResponse);
755    }
756    let tag = input[offset];
757    let first_len = input[offset + 1];
758    if first_len & 0x80 == 0 {
759        let len = first_len as usize;
760        Ok((tag, len, 2))
761    } else {
762        let n = (first_len & 0x7f) as usize;
763        if n == 0 || n > 4 || offset + 2 + n > input.len() {
764            return Err(TsaError::InvalidResponse);
765        }
766        let mut len = 0usize;
767        for b in &input[offset + 2..offset + 2 + n] {
768            len = (len << 8) | (*b as usize);
769        }
770        Ok((tag, len, 2 + n))
771    }
772}
773
774fn der_parse_integer_i64(bytes: &[u8]) -> Result<i64, TsaError> {
775    if bytes.is_empty() || bytes.len() > 8 {
776        return Err(TsaError::InvalidResponse);
777    }
778    let mut v: i64 = 0;
779    for b in bytes {
780        v = (v << 8) | (*b as i64);
781    }
782    Ok(v)
783}
784
/// Encode a DER length field: short form below 128, long form
/// (0x80 | byte-count, then big-endian bytes) otherwise.
fn der_len(len: usize) -> Vec<u8> {
    if len < 128 {
        return vec![len as u8];
    }
    // Strip leading zero bytes from the big-endian representation.
    let be = len.to_be_bytes();
    let first = be.iter().position(|b| *b != 0).unwrap_or(be.len() - 1);
    let body = &be[first..];
    let mut out = Vec::with_capacity(1 + body.len());
    out.push(0x80 | (body.len() as u8));
    out.extend_from_slice(body);
    out
}
800
801fn der_wrap(tag: u8, value: &[u8]) -> Vec<u8> {
802    let mut out = vec![tag];
803    out.extend(der_len(value.len()));
804    out.extend(value);
805    out
806}
807
808fn der_sequence(parts: &[Vec<u8>]) -> Vec<u8> {
809    let mut content = Vec::new();
810    for part in parts {
811        content.extend(part);
812    }
813    der_wrap(0x30, &content)
814}
815
/// Encode a DER NULL (tag 0x05, zero length).
fn der_null() -> Vec<u8> {
    vec![0x05, 0x00]
}
819
/// Encode a DER BOOLEAN: content byte is 0xFF for TRUE, 0x00 for FALSE.
fn der_boolean(v: bool) -> Vec<u8> {
    let content = if v { 0xff } else { 0x00 };
    vec![0x01, 0x01, content]
}
823
824fn der_integer_u64(v: u64) -> Vec<u8> {
825    let mut bytes = if v == 0 {
826        vec![0]
827    } else {
828        let mut tmp = Vec::new();
829        let mut n = v;
830        while n > 0 {
831            tmp.push((n & 0xff) as u8);
832            n >>= 8;
833        }
834        tmp.reverse();
835        tmp
836    };
837    if bytes[0] & 0x80 != 0 {
838        bytes.insert(0, 0);
839    }
840    der_wrap(0x02, &bytes)
841}
842
843fn der_integer_positive(bytes: &[u8]) -> Vec<u8> {
844    let mut v = bytes.to_vec();
845    while v.first() == Some(&0) && v.len() > 1 {
846        v.remove(0);
847    }
848    if v.first().map(|b| b & 0x80 != 0).unwrap_or(false) {
849        v.insert(0, 0);
850    }
851    der_wrap(0x02, &v)
852}
853
/// Encode a DER OCTET STRING (tag 0x04).
fn der_octet_string(bytes: &[u8]) -> Vec<u8> {
    der_wrap(0x04, bytes)
}
857
858fn der_oid(oid: &[u32]) -> Vec<u8> {
859    let mut out = Vec::new();
860    if oid.len() < 2 {
861        return der_wrap(0x06, &out);
862    }
863    out.push((oid[0] * 40 + oid[1]) as u8);
864    for &arc in &oid[2..] {
865        let mut stack = [0u8; 5];
866        let mut idx = stack.len();
867        let mut n = arc;
868        stack[idx - 1] = (n & 0x7f) as u8;
869        idx -= 1;
870        n >>= 7;
871        while n > 0 {
872            stack[idx - 1] = 0x80 | ((n & 0x7f) as u8);
873            idx -= 1;
874            n >>= 7;
875        }
876        out.extend(&stack[idx..]);
877    }
878    der_wrap(0x06, &out)
879}
880
881#[cfg(test)]
882mod tests {
883    use super::*;
884    use base64::{engine::general_purpose::STANDARD, Engine as _};
885    use std::io::Read;
886    use tempfile::tempdir;
887
    // End-to-end check of publication creation, signing defaults, and
    // day-to-day root chaining (genesis root is all zeros; after signing,
    // the next day chains to the signed day's root).
    #[test]
    fn test_daily_publication_and_signature_chain() {
        let mut service = PublicationService::new();
        let hourly_roots = vec!["a".repeat(64), "b".repeat(64)];

        // Day 1: unsigned, chained to the all-zero genesis root.
        let mut day1 = service.create_daily_publication(&hourly_roots, 42);
        assert_eq!(day1.entry_count, 42);
        assert_eq!(day1.hourly_roots.len(), 2);
        assert_eq!(day1.previous_day_root, "0".repeat(64));
        assert!(day1.signature.is_none());

        // Signing applies the default algorithm/key-id metadata.
        service.sign_publication(&mut day1, b"sig");
        let sig = day1.signature.as_ref().expect("signature set");
        assert_eq!(sig.algorithm, "RSA-PSS-SHA256");
        assert_eq!(sig.value, STANDARD.encode(b"sig"));

        // Day 2 chains to the root that was just signed.
        let day2 = service.create_daily_publication(&hourly_roots, 1);
        assert_eq!(day2.previous_day_root, day1.root_hash);
    }
907
    // The mock:// TSA provider should succeed offline and attach a token
    // derived from the publication's root hash.
    #[test]
    fn test_add_tsa_timestamp_mock_only() {
        let mut service = PublicationService::new();
        let hourly_roots = vec!["c".repeat(64)];
        let mut publication = service.create_daily_publication(&hourly_roots, 1);

        // Single-threaded runtime is enough for the mock path (no I/O).
        let rt = tokio::runtime::Builder::new_current_thread()
            .build()
            .expect("runtime");

        rt.block_on(async {
            service
                .add_tsa_timestamp(&mut publication, "mock://tsa")
                .await
                .expect("mock TSA works");
        });

        let tsa = publication
            .tsa_timestamp
            .as_ref()
            .expect("tsa timestamp set");
        assert_eq!(tsa.tsa_url, "mock://tsa");
        assert!(tsa.token.starts_with("mock-sha256="));
    }
932
933    #[test]
934    fn test_add_tsa_timestamp_rejects_non_mock() {
935        let mut service = PublicationService::new();
936        let hourly_roots = vec!["d".repeat(64)];
937        let mut publication = service.create_daily_publication(&hourly_roots, 1);
938
939        let rt = tokio::runtime::Builder::new_current_thread()
940            .build()
941            .expect("runtime");
942
943        let err = rt.block_on(async {
944            service
945                .add_tsa_timestamp(&mut publication, "https://tsa.example")
946                .await
947                .expect_err("network call should fail for placeholder endpoint")
948        });
949
950        match err {
951            TsaError::Server(_) | TsaError::Network(_) => {}
952            other => panic!("unexpected error: {other}"),
953        }
954        assert!(publication.tsa_timestamp.is_none());
955    }
956
957    #[test]
958    fn test_build_rfc3161_query_der_contains_sha256_oid() {
959        let digest = [0x11u8; 32];
960        let req = build_rfc3161_timestamp_query(&digest, "nonce").expect("query");
961        // sha256 OID bytes: 06 09 60 86 48 01 65 03 04 02 01
962        let oid = [
963            0x06, 0x09, 0x60, 0x86, 0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x01,
964        ];
965        assert!(req.windows(oid.len()).any(|w| w == oid));
966    }
967
968    #[test]
969    fn test_parse_timestamp_response_status_only() {
970        // TimeStampResp ::= SEQUENCE { status PKIStatusInfo }
971        let resp = [0x30, 0x05, 0x30, 0x03, 0x02, 0x01, 0x00];
972        let parsed = parse_timestamp_response(&resp).expect("parse");
973        assert_eq!(parsed.status, 0);
974        assert!(parsed.time_stamp_token_der.is_none());
975    }
976
977    #[test]
978    fn test_extract_generalized_time_best_effort() {
979        // DER GeneralizedTime: "20260226083045Z"
980        let mut bytes = vec![0x18, 0x0f];
981        bytes.extend_from_slice(b"20260226083045Z");
982        let ts = extract_generalized_time_rfc3339(&bytes).expect("timestamp");
983        assert!(ts.starts_with("2026-02-26T08:30:45"));
984    }
985
986    #[test]
987    fn test_canonical_json_export_is_deterministic() {
988        let service = PublicationService::new();
989        let publication = service.create_daily_publication(&["e".repeat(64)], 7);
990
991        let json1 = publication.to_canonical_json().expect("json1");
992        let json2 = publication.to_canonical_json().expect("json2");
993
994        assert_eq!(json1, json2);
995        assert!(!json1.contains('\n'));
996        assert!(json1.contains("\"entry_count\":7"));
997        assert!(json1.contains("\"hourly_roots\""));
998    }
999
1000    #[test]
1001    fn test_canonical_json_gzip_roundtrip() {
1002        let service = PublicationService::new();
1003        let publication = service.create_daily_publication(&["f".repeat(64)], 3);
1004
1005        let original = publication.to_canonical_json_bytes().expect("original");
1006        let compressed = publication.to_canonical_json_gzip().expect("gzip");
1007        assert!(!compressed.is_empty());
1008
1009        let mut decoder = flate2::read::GzDecoder::new(compressed.as_slice());
1010        let mut decompressed = Vec::new();
1011        decoder.read_to_end(&mut decompressed).expect("decompress");
1012
1013        assert_eq!(decompressed, original);
1014    }
1015
1016    #[test]
1017    fn test_publication_basename_is_stable() {
1018        let service = PublicationService::new();
1019        let publication = service.create_daily_publication(&["bb".repeat(32)], 1);
1020        let base = publication.publication_basename();
1021
1022        assert!(base.starts_with("daily-publication-"));
1023        assert!(base.contains(&publication.date));
1024        assert!(base.ends_with(&publication.root_hash[..16]));
1025    }
1026
1027    #[test]
1028    fn test_verify_root_hash_detects_tamper() {
1029        let service = PublicationService::new();
1030        let mut publication =
1031            service.create_daily_publication(&["aa".repeat(32), "bb".repeat(32)], 2);
1032        assert!(publication.verify_root_hash());
1033
1034        publication.hourly_roots.push("cc".repeat(32));
1035        assert!(!publication.verify_root_hash());
1036    }
1037
1038    #[test]
1039    fn test_tsa_token_inspection() {
1040        let tsa = TsaTimestamp {
1041            tsa_url: "https://tsa.example".to_string(),
1042            timestamp: "2026-02-26T00:00:00Z".to_string(),
1043            token: base64_encode(&[
1044                0x18, 0x0f, b'2', b'0', b'2', b'6', b'0', b'2', b'2', b'6', b'0', b'8', b'3', b'0',
1045                b'4', b'5', b'Z',
1046            ]),
1047        };
1048        let inspected = tsa.inspect_token();
1049        assert!(inspected.token_present);
1050        assert!(inspected.token_base64_valid);
1051        assert!(inspected.token_der_nonempty);
1052        assert!(inspected.extracted_timestamp.is_some());
1053
1054        let bad = TsaTimestamp {
1055            tsa_url: "https://tsa.example".to_string(),
1056            timestamp: "2026-02-26T00:00:00Z".to_string(),
1057            token: "%%%".to_string(),
1058        };
1059        let bad_inspected = bad.inspect_token();
1060        assert!(bad_inspected.token_present);
1061        assert!(!bad_inspected.token_base64_valid);
1062    }
1063
1064    #[cfg(feature = "tsa-cms-openssl")]
1065    #[test]
1066    fn test_tsa_cms_verify_rejects_invalid_base64() {
1067        let tsa = TsaTimestamp {
1068            tsa_url: "https://tsa.example".to_string(),
1069            timestamp: "2026-02-26T00:00:00Z".to_string(),
1070            token: "%%%".to_string(),
1071        };
1072
1073        let err = tsa
1074            .verify_cms_signature_with_pem_roots(b"")
1075            .expect_err("invalid base64 must fail");
1076        match err {
1077            TsaCmsVerifyError::TokenBase64(_) => {}
1078            other => panic!("unexpected error: {other}"),
1079        }
1080    }
1081
1082    #[cfg(feature = "tsa-cms-openssl")]
1083    #[test]
1084    fn test_tsa_cms_verify_rejects_non_pkcs7_der() {
1085        let tsa = TsaTimestamp {
1086            tsa_url: "https://tsa.example".to_string(),
1087            timestamp: "2026-02-26T00:00:00Z".to_string(),
1088            token: base64_encode(&[0x30, 0x03, 0x02, 0x01, 0x00]),
1089        };
1090
1091        let err = tsa
1092            .verify_cms_signature_with_pem_roots(b"")
1093            .expect_err("non-pkcs7 der must fail");
1094        match err {
1095            TsaCmsVerifyError::Pkcs7Parse(_) | TsaCmsVerifyError::TrustStore(_) => {}
1096            other => panic!("unexpected error: {other}"),
1097        }
1098    }
1099
1100    #[cfg(not(feature = "tsa-cms-openssl"))]
1101    #[test]
1102    fn test_tsa_cms_verify_reports_backend_unavailable_without_feature() {
1103        let tsa = TsaTimestamp {
1104            tsa_url: "https://tsa.example".to_string(),
1105            timestamp: "2026-02-26T00:00:00Z".to_string(),
1106            token: "%%%".to_string(),
1107        };
1108
1109        let err = tsa
1110            .verify_cms_signature_with_pem_roots(b"")
1111            .expect_err("backend should be unavailable without feature");
1112        match err {
1113            TsaCmsVerifyError::BackendUnavailable(_) => {}
1114            other => panic!("unexpected error: {other}"),
1115        }
1116    }
1117
1118    #[test]
1119    fn test_publish_to_filesystem_writes_json_and_gzip() {
1120        let tmp = tempdir().expect("tempdir");
1121        let service = PublicationService::new();
1122        let publication = service.create_daily_publication(&["aa".repeat(32)], 11);
1123
1124        let written = service
1125            .publish_to_filesystem(&publication, tmp.path(), true)
1126            .expect("publish");
1127
1128        assert!(written.json_path.exists());
1129        let gzip_path = written.gzip_path.as_ref().expect("gzip path");
1130        assert!(gzip_path.exists());
1131
1132        let json_bytes = std::fs::read(&written.json_path).expect("json bytes");
1133        assert_eq!(
1134            json_bytes,
1135            publication
1136                .to_canonical_json_bytes()
1137                .expect("canonical json")
1138        );
1139
1140        let gz_bytes = std::fs::read(gzip_path).expect("gzip bytes");
1141        let mut decoder = flate2::read::GzDecoder::new(gz_bytes.as_slice());
1142        let mut out = Vec::new();
1143        decoder.read_to_end(&mut out).expect("decompress");
1144        assert_eq!(out, json_bytes);
1145    }
1146}