// immutable_logging/publication.rs
1//! Publication - Daily audit publication
2
3use chrono::Utc;
4use serde::{Deserialize, Serialize};
5use serde_json::Value;
6use sha2::Digest as _;
7use std::io::Write;
8use std::path::{Path, PathBuf};
9
/// Daily audit publication
///
/// Snapshot of one day's audit state: the Merkle root over that day's hourly
/// roots, chained to the previous day's root, optionally signed and
/// TSA-timestamped.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DailyPublication {
    /// Publication date (`YYYY-%m-%d`-formatted UTC date, see `create_daily_publication`)
    pub date: String,
    /// Root hash of all hourly roots (hex-encoded SHA-256 Merkle root)
    pub root_hash: String,
    /// Total entry count
    pub entry_count: u64,
    /// Hourly root hashes
    pub hourly_roots: Vec<String>,
    /// Previous day root (for chaining; all-zero hex sentinel when there is none)
    pub previous_day_root: String,
    /// Creation timestamp (RFC 3339)
    pub created_at: String,
    /// Signature (None until `PublicationService::sign_publication*` is called)
    pub signature: Option<PublicationSignature>,
    /// TSA timestamp (None until `PublicationService::add_tsa_timestamp` succeeds)
    pub tsa_timestamp: Option<TsaTimestamp>,
}
30
31impl DailyPublication {
32    /// Export as canonical deterministic JSON bytes.
33    pub fn to_canonical_json_bytes(&self) -> Result<Vec<u8>, crate::error::LogError> {
34        let canonical = canonical_publication_json_value(self)?;
35        canonical_json_bytes(&canonical)
36    }
37
38    /// Export as compact deterministic JSON string.
39    pub fn to_canonical_json(&self) -> Result<String, crate::error::LogError> {
40        let bytes = self.to_canonical_json_bytes()?;
41        String::from_utf8(bytes)
42            .map_err(|e| crate::error::LogError::SerializationError(e.to_string()))
43    }
44
45    /// Export as gzip-compressed canonical JSON.
46    pub fn to_canonical_json_gzip(&self) -> Result<Vec<u8>, crate::error::LogError> {
47        let json = self.to_canonical_json_bytes()?;
48        let mut encoder = flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::default());
49        encoder
50            .write_all(&json)
51            .map_err(|e| crate::error::LogError::SerializationError(e.to_string()))?;
52        encoder
53            .finish()
54            .map_err(|e| crate::error::LogError::SerializationError(e.to_string()))
55    }
56
57    /// Build a deterministic basename suitable for filesystem/object publication backends.
58    pub fn publication_basename(&self) -> String {
59        let root_prefix = self.root_hash.get(..16).unwrap_or(&self.root_hash);
60        format!("daily-publication-{}-{}", self.date, root_prefix)
61    }
62
63    /// Recompute the publication root from `hourly_roots`.
64    pub fn recompute_root_hash(&self) -> String {
65        PublicationService::compute_merkle_root(&self.hourly_roots)
66    }
67
68    /// Check whether the stored `root_hash` matches the recomputed value.
69    pub fn verify_root_hash(&self) -> bool {
70        self.root_hash == self.recompute_root_hash()
71    }
72
73    /// Write canonical JSON to a file path.
74    pub fn write_canonical_json_file<P: AsRef<Path>>(
75        &self,
76        path: P,
77    ) -> Result<(), crate::error::LogError> {
78        let bytes = self.to_canonical_json_bytes()?;
79        std::fs::write(path, bytes)
80            .map_err(|e| crate::error::LogError::PublicationError(e.to_string()))
81    }
82
83    /// Write gzip-compressed canonical JSON to a file path.
84    pub fn write_canonical_json_gzip_file<P: AsRef<Path>>(
85        &self,
86        path: P,
87    ) -> Result<(), crate::error::LogError> {
88        let bytes = self.to_canonical_json_gzip()?;
89        std::fs::write(path, bytes)
90            .map_err(|e| crate::error::LogError::PublicationError(e.to_string()))
91    }
92}
93
/// Publication signature
///
/// Metadata for a detached signature over a publication; the raw signature
/// bytes are stored base64-encoded in `value`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PublicationSignature {
    /// Signature algorithm label (e.g. "RSA-PSS-SHA256")
    pub algorithm: String,
    /// Identifier of the signing key
    pub key_id: String,
    /// Base64-encoded signature bytes
    pub value: String,
}
101
/// TSA timestamp
///
/// RFC 3161-style timestamp metadata attached to a publication.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TsaTimestamp {
    /// URL of the timestamp authority that was queried
    pub tsa_url: String,
    /// Extracted or derived timestamp (RFC 3339)
    pub timestamp: String,
    /// Base64-encoded timeStampToken DER, or a `mock-sha256=...` marker for `mock://` URLs
    pub token: String,
}
109
/// Best-effort inspection result for a stored TSA token.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TsaTokenInspection {
    /// Whether a non-empty token string is stored
    pub token_present: bool,
    /// Whether the token decoded as valid standard base64
    pub token_base64_valid: bool,
    /// Whether the decoded DER payload is non-empty
    pub token_der_nonempty: bool,
    /// Timestamp recovered by a GeneralizedTime scan of the DER, if any
    pub extracted_timestamp: Option<String>,
}
118
/// Cryptographic CMS/PKCS#7 verification result for a TSA token.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TsaCmsVerification {
    /// True when the CMS signature verified against the supplied trust store
    pub verified: bool,
    /// Timestamp recovered by a GeneralizedTime scan of the token DER, if any
    pub extracted_timestamp: Option<String>,
}
125
/// TSA token CMS verification error.
///
/// Variants map to the stages of `TsaTimestamp::verify_cms_signature_with_pem_roots`:
/// backend availability, token presence/decoding, PKCS#7 parsing, trust-store
/// construction, and the CMS verification itself.
#[derive(Debug, thiserror::Error)]
pub enum TsaCmsVerifyError {
    #[error("TSA CMS verification backend unavailable: {0}")]
    BackendUnavailable(String),
    #[error("TSA token missing")]
    TokenMissing,
    #[error("TSA token base64 decode failed: {0}")]
    TokenBase64(String),
    #[error("TSA token PKCS#7 parse failed: {0}")]
    Pkcs7Parse(String),
    #[error("TSA trust store error: {0}")]
    TrustStore(String),
    #[error("TSA CMS verification failed: {0}")]
    Verify(String),
}
142
/// Publication service
///
/// Creates, signs, and publishes daily audit publications, carrying the
/// previous day's root forward so consecutive days are hash-chained.
pub struct PublicationService {
    /// Previous day root (recorded when a publication is signed; used to chain the next one)
    previous_day_root: Option<String>,
}
148
149impl Default for PublicationService {
150    fn default() -> Self {
151        Self::new()
152    }
153}
154
impl PublicationService {
    /// Create new publication service
    ///
    /// Starts with no recorded previous-day root; the first publication
    /// created will chain from the all-zero sentinel root.
    pub fn new() -> Self {
        PublicationService {
            previous_day_root: None,
        }
    }

    /// Create daily publication
    ///
    /// Stamps the publication with the current UTC date (`%Y-%m-%d`) and
    /// delegates to [`Self::create_daily_publication_for_date`].
    pub fn create_daily_publication(
        &self,
        hourly_roots: &[String],
        entry_count: u64,
    ) -> DailyPublication {
        let date = Utc::now().format("%Y-%m-%d").to_string();
        self.create_daily_publication_for_date(&date, hourly_roots, entry_count)
    }

    /// Build an unsigned publication for an explicit `date`.
    ///
    /// The previous-day root falls back to the all-zero sentinel until a
    /// prior publication has been signed (signing records its root for
    /// chaining — see `sign_publication_with_metadata`).
    pub fn create_daily_publication_for_date(
        &self,
        date: &str,
        hourly_roots: &[String],
        entry_count: u64,
    ) -> DailyPublication {
        let previous = self.previous_day_root.clone().unwrap_or_else(|| {
            "0000000000000000000000000000000000000000000000000000000000000000".to_string()
        });

        // Compute root hash of all hourly roots
        let root_hash = Self::compute_merkle_root(hourly_roots);

        DailyPublication {
            date: date.to_string(),
            root_hash,
            entry_count,
            hourly_roots: hourly_roots.to_vec(),
            previous_day_root: previous,
            created_at: Utc::now().to_rfc3339(),
            signature: None,
            tsa_timestamp: None,
        }
    }

    /// Compute merkle root from list of hashes
    ///
    /// Leaves are hashed with a 0x00 domain-separation prefix (see
    /// `merkle_leaf_hash`); interior nodes use 0x01. An unpaired node at any
    /// level is combined with itself. Empty input yields the all-zero root.
    fn compute_merkle_root(hashes: &[String]) -> String {
        if hashes.is_empty() {
            return "0000000000000000000000000000000000000000000000000000000000000000".to_string();
        }

        use sha2::{Digest, Sha256};

        let mut current: Vec<Vec<u8>> = hashes.iter().map(|h| merkle_leaf_hash(h)).collect();

        // Reduce level by level until a single root remains.
        while current.len() > 1 {
            let mut next = Vec::new();

            for chunk in current.chunks(2) {
                let left = &chunk[0];
                let right = if chunk.len() == 2 {
                    &chunk[1]
                } else {
                    // Odd node at this level: pair it with itself.
                    &chunk[0]
                };
                let mut hasher = Sha256::new();
                hasher.update([0x01]); // interior-node domain separator
                hasher.update(left);
                hasher.update(right);
                next.push(hasher.finalize().to_vec());
            }

            current = next;
        }

        hex_encode(&current[0])
    }

    /// Sign publication
    ///
    /// Uses the default algorithm/key-id labels; see
    /// [`Self::sign_publication_with_metadata`] for the full behavior.
    pub fn sign_publication(&mut self, publication: &mut DailyPublication, signature: &[u8]) {
        self.sign_publication_with_metadata(
            publication,
            signature,
            "RSA-PSS-SHA256",
            "rnbc-audit-sig-2026",
        );
    }

    /// Sign publication with explicit metadata (useful for API-driven integrations).
    ///
    /// Stores the base64-encoded signature on the publication and records the
    /// publication's root hash as the chaining root for the next day.
    /// Note: the signature bytes are stored as-is; no verification happens here.
    pub fn sign_publication_with_metadata(
        &mut self,
        publication: &mut DailyPublication,
        signature: &[u8],
        algorithm: &str,
        key_id: &str,
    ) {
        publication.signature = Some(PublicationSignature {
            algorithm: algorithm.to_string(),
            key_id: key_id.to_string(),
            value: base64_encode(signature),
        });

        // Store previous day root for chaining
        self.previous_day_root = Some(publication.root_hash.clone());
    }

    /// Publish to a local filesystem directory (precursor to WORM/object storage backends).
    ///
    /// Writes `<basename>.json` always, and `<basename>.json.gz` when
    /// `write_gzip` is set; the directory is created if missing.
    pub fn publish_to_filesystem<P: AsRef<Path>>(
        &self,
        publication: &DailyPublication,
        directory: P,
        write_gzip: bool,
    ) -> Result<FilesystemPublication, crate::error::LogError> {
        let dir = directory.as_ref();
        std::fs::create_dir_all(dir)
            .map_err(|e| crate::error::LogError::PublicationError(e.to_string()))?;

        let basename = publication.publication_basename();
        let json_path = dir.join(format!("{basename}.json"));
        publication.write_canonical_json_file(&json_path)?;

        let gzip_path = if write_gzip {
            let path = dir.join(format!("{basename}.json.gz"));
            publication.write_canonical_json_gzip_file(&path)?;
            Some(path)
        } else {
            None
        };

        Ok(FilesystemPublication {
            json_path,
            gzip_path,
        })
    }

    /// Add TSA timestamp metadata.
    ///
    /// `mock://` URLs are supported for local testing.
    ///
    /// `http(s)://` URLs use an experimental RFC 3161 request path that retrieves
    /// and stores the TSA token, but does not yet perform full CMS/token validation.
    ///
    /// On success `publication.tsa_timestamp` is set; on error it is left untouched.
    pub async fn add_tsa_timestamp(
        &mut self,
        publication: &mut DailyPublication,
        tsa_url: &str,
    ) -> Result<(), TsaError> {
        // Serialize publication hash for TSA request
        let hash_to_timestamp = &publication.root_hash;

        // In production, this would be a proper RFC 3161 request
        // For now, we'll implement a basic timestamp request structure
        let timestamp_request = TsaRequest {
            hash: hash_to_timestamp.clone(),
            algorithm: "SHA256".to_string(),
            // Random nonce text; hashed into a DER INTEGER for the real request path.
            nonce: uuid::Uuid::new_v4().to_string(),
        };

        // Make request to TSA (in production, use actual TSA server)
        let response = self.request_timestamp(tsa_url, &timestamp_request).await?;

        publication.tsa_timestamp = Some(TsaTimestamp {
            tsa_url: tsa_url.to_string(),
            timestamp: response.timestamp,
            token: response.token,
        });

        tracing::info!(
            "TSA timestamp added for publication {} at {}",
            publication.date,
            publication
                .tsa_timestamp
                .as_ref()
                .map(|t| t.timestamp.as_str())
                .map_or("unknown", |v| v)
        );

        Ok(())
    }

    /// Request timestamp from TSA server.
    ///
    /// Supports:
    /// - `mock://...` for tests
    /// - `http(s)://...` experimental RFC 3161 transport (token retrieval only)
    ///
    /// Any other scheme is rejected with `TsaError::UnsupportedScheme`.
    async fn request_timestamp(
        &self,
        tsa_url: &str,
        request: &TsaRequest,
    ) -> Result<TsaResponse, TsaError> {
        if tsa_url.starts_with("mock://") {
            tracing::warn!("Using mock TSA timestamp provider: {}", tsa_url);
            return Ok(TsaResponse {
                timestamp: chrono::Utc::now().to_rfc3339(),
                token: format!("mock-sha256={}", request.hash),
                tsa_certificate: "placeholder".to_string(),
            });
        }

        if !(tsa_url.starts_with("https://") || tsa_url.starts_with("http://")) {
            return Err(TsaError::UnsupportedScheme(tsa_url.to_string()));
        }

        // `request.hash` is expected to be hex-encoded SHA-256 (64 chars).
        let digest_bytes = hex_decode(&request.hash).map_err(TsaError::Encoding)?;
        let body = build_rfc3161_timestamp_query(&digest_bytes, &request.nonce)?;

        tracing::info!("Requesting TSA token from {}", tsa_url);
        let client = reqwest::Client::new();
        let resp = client
            .post(tsa_url)
            .header("Content-Type", "application/timestamp-query")
            .header("Accept", "application/timestamp-reply")
            .body(body)
            .send()
            .await?;

        let status_code = resp.status();
        if !status_code.is_success() {
            return Err(TsaError::Server(format!(
                "HTTP {} from TSA endpoint",
                status_code
            )));
        }

        // Keep the HTTP Date header as a fallback timestamp source.
        let date_header = resp
            .headers()
            .get(reqwest::header::DATE)
            .and_then(|v| v.to_str().ok())
            .map(str::to_string);
        let bytes = resp.bytes().await?;

        let tsa_reply = parse_timestamp_response(&bytes)?;
        // Accept PKIStatus 0 (granted) and 1 (grantedWithMods); anything else is a rejection.
        if tsa_reply.status != 0 && tsa_reply.status != 1 {
            return Err(TsaError::Server(format!(
                "TSA rejected request with status {}",
                tsa_reply.status
            )));
        }

        let token_der = tsa_reply
            .time_stamp_token_der
            .ok_or(TsaError::InvalidResponse)?;

        // Best-effort timestamp extraction from token bytes (GeneralizedTime scan).
        // Full CMS/ESS validation is pending.
        let timestamp = extract_generalized_time_rfc3339(&token_der)
            .or_else(|| date_header.and_then(parse_http_date_to_rfc3339))
            .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());

        Ok(TsaResponse {
            timestamp,
            token: base64_encode(&token_der),
            tsa_certificate: "unparsed".to_string(),
        })
    }
}
408
impl TsaTimestamp {
    /// Best-effort validation/inspection of stored TSA token encoding and timestamp extraction.
    ///
    /// This does not perform CMS/PKCS#7 signature validation.
    /// Each failure stage is reported via the corresponding flag rather than
    /// an error, so callers can always get a full inspection record.
    pub fn inspect_token(&self) -> TsaTokenInspection {
        use base64::{engine::general_purpose::STANDARD, Engine as _};

        if self.token.is_empty() {
            return TsaTokenInspection {
                token_present: false,
                token_base64_valid: false,
                token_der_nonempty: false,
                extracted_timestamp: None,
            };
        }

        let der = match STANDARD.decode(self.token.as_bytes()) {
            Ok(v) => v,
            Err(_) => {
                // Token exists but is not valid base64; stop here.
                return TsaTokenInspection {
                    token_present: true,
                    token_base64_valid: false,
                    token_der_nonempty: false,
                    extracted_timestamp: None,
                };
            }
        };

        // Heuristic GeneralizedTime scan; None when nothing parseable is found.
        let extracted_timestamp = extract_generalized_time_rfc3339(&der);
        TsaTokenInspection {
            token_present: true,
            token_base64_valid: true,
            token_der_nonempty: !der.is_empty(),
            extracted_timestamp,
        }
    }

    /// Verify the `timeStampToken` CMS/PKCS#7 signature against trusted PEM certificates.
    ///
    /// This validates CMS signature and certificate chain. RFC3161-specific TSTInfo checks
    /// (message imprint, policy, nonce) are not yet enforced here.
    ///
    /// `trust_store_pem` is a PEM bundle of trusted root/intermediate certs.
    #[cfg(feature = "tsa-cms-openssl")]
    pub fn verify_cms_signature_with_pem_roots(
        &self,
        trust_store_pem: &[u8],
    ) -> Result<TsaCmsVerification, TsaCmsVerifyError> {
        use base64::{engine::general_purpose::STANDARD, Engine as _};
        use openssl::pkcs7::{Pkcs7, Pkcs7Flags};
        use openssl::stack::Stack;
        use openssl::x509::{store::X509StoreBuilder, X509};

        if self.token.is_empty() {
            return Err(TsaCmsVerifyError::TokenMissing);
        }

        let der = STANDARD
            .decode(self.token.as_bytes())
            .map_err(|e| TsaCmsVerifyError::TokenBase64(e.to_string()))?;
        // Extract a display timestamp up front; independent of verification result.
        let extracted_timestamp = extract_generalized_time_rfc3339(&der);

        let pkcs7 =
            Pkcs7::from_der(&der).map_err(|e| TsaCmsVerifyError::Pkcs7Parse(e.to_string()))?;

        // Build an X509 store containing every cert in the PEM bundle.
        let certs = X509::stack_from_pem(trust_store_pem)
            .map_err(|e| TsaCmsVerifyError::TrustStore(e.to_string()))?;
        let mut store_builder =
            X509StoreBuilder::new().map_err(|e| TsaCmsVerifyError::TrustStore(e.to_string()))?;
        for cert in certs {
            store_builder
                .add_cert(cert)
                .map_err(|e| TsaCmsVerifyError::TrustStore(e.to_string()))?;
        }
        let store = store_builder.build();

        // No extra untrusted certs supplied; signer certs must come from the token itself.
        let cert_stack: Stack<X509> =
            Stack::new().map_err(|e| TsaCmsVerifyError::TrustStore(e.to_string()))?;
        let mut out = Vec::<u8>::new();
        pkcs7
            .verify(
                &cert_stack,
                &store,
                None,
                Some(&mut out),
                Pkcs7Flags::empty(),
            )
            .map_err(|e| TsaCmsVerifyError::Verify(e.to_string()))?;

        Ok(TsaCmsVerification {
            verified: true,
            extracted_timestamp,
        })
    }

    /// Fallback when the crate is built without the `tsa-cms-openssl` feature:
    /// always reports the backend as unavailable.
    #[cfg(not(feature = "tsa-cms-openssl"))]
    pub fn verify_cms_signature_with_pem_roots(
        &self,
        _trust_store_pem: &[u8],
    ) -> Result<TsaCmsVerification, TsaCmsVerifyError> {
        Err(TsaCmsVerifyError::BackendUnavailable(
            "immutable-logging compiled without feature `tsa-cms-openssl`".to_string(),
        ))
    }
}
512
/// Files created by a filesystem publication backend.
#[derive(Debug, Clone)]
pub struct FilesystemPublication {
    /// Path of the canonical JSON artifact (always written)
    pub json_path: PathBuf,
    /// Path of the gzip artifact (only when gzip output was requested)
    pub gzip_path: Option<PathBuf>,
}
519
/// TSA Request structure (RFC 3161 subset)
///
/// Internal request model; converted to a DER TimeStampReq for real
/// endpoints (see `build_rfc3161_timestamp_query`).
#[derive(Debug, Clone, Serialize, Deserialize)]
struct TsaRequest {
    /// Hex-encoded digest of the data to timestamp (the publication root hash)
    hash: String,
    /// Digest algorithm label ("SHA256" on this code path)
    algorithm: String,
    /// Nonce text; hashed to derive the DER nonce INTEGER
    nonce: String,
}
527
/// TSA Response structure (RFC 3161 subset)
#[derive(Debug, Clone, Serialize, Deserialize)]
struct TsaResponse {
    /// Extracted or derived timestamp (RFC 3339)
    timestamp: String,
    /// Base64-encoded timeStampToken DER (or a mock marker)
    token: String,
    /// TSA certificate placeholder ("placeholder"/"unparsed"; not yet extracted)
    tsa_certificate: String,
}
535
/// TSA Error type
///
/// Covers transport failures, request encoding problems, server-side
/// rejections, unsupported URL schemes, and malformed responses.
#[derive(Debug, thiserror::Error)]
pub enum TsaError {
    #[error("Network error: {0}")]
    Network(#[from] reqwest::Error),

    #[error("Encoding error: {0}")]
    Encoding(String),

    #[error("TSA server error: {0}")]
    Server(String),

    #[error("Unsupported TSA URL scheme: {0}")]
    UnsupportedScheme(String),

    #[error("Invalid response from TSA")]
    InvalidResponse,
}
554
555/// Base64 encode
556fn base64_encode(data: &[u8]) -> String {
557    use base64::{engine::general_purpose::STANDARD, Engine as _};
558    STANDARD.encode(data)
559}
560
561fn canonical_publication_json_value(
562    publication: &DailyPublication,
563) -> Result<Value, crate::error::LogError> {
564    let signature = match publication.signature.as_ref() {
565        Some(sig) => serde_json::json!({
566            "algorithm": sig.algorithm,
567            "key_id": sig.key_id,
568            "value": sig.value,
569        }),
570        None => Value::Null,
571    };
572
573    let tsa_timestamp = match publication.tsa_timestamp.as_ref() {
574        Some(tsa) => serde_json::json!({
575            "timestamp": tsa.timestamp,
576            "token": tsa.token,
577            "tsa_url": tsa.tsa_url,
578        }),
579        None => Value::Null,
580    };
581
582    serde_json::from_value::<Value>(serde_json::json!({
583        "schema_version": "rsrp-daily-publication-v1",
584        "created_at": publication.created_at,
585        "date": publication.date,
586        "entry_count": publication.entry_count,
587        "hourly_roots": publication.hourly_roots,
588        "previous_day_root": publication.previous_day_root,
589        "root_hash": publication.root_hash,
590        "signature": signature,
591        "tsa_timestamp": tsa_timestamp,
592    }))
593    .map_err(|e| crate::error::LogError::SerializationError(e.to_string()))
594}
595
596fn canonical_json_bytes(value: &Value) -> Result<Vec<u8>, crate::error::LogError> {
597    let mut out = String::new();
598    write_canonical_json(value, &mut out)?;
599    Ok(out.into_bytes())
600}
601
602fn write_canonical_json(value: &Value, out: &mut String) -> Result<(), crate::error::LogError> {
603    match value {
604        Value::Null => out.push_str("null"),
605        Value::Bool(v) => out.push_str(if *v { "true" } else { "false" }),
606        Value::Number(v) => out.push_str(&v.to_string()),
607        Value::String(v) => {
608            let encoded = serde_json::to_string(v)
609                .map_err(|e| crate::error::LogError::SerializationError(e.to_string()))?;
610            out.push_str(&encoded);
611        }
612        Value::Array(values) => {
613            out.push('[');
614            for (i, entry) in values.iter().enumerate() {
615                if i > 0 {
616                    out.push(',');
617                }
618                write_canonical_json(entry, out)?;
619            }
620            out.push(']');
621        }
622        Value::Object(map) => {
623            let mut keys: Vec<&str> = map.keys().map(|k| k.as_str()).collect();
624            keys.sort_unstable();
625            out.push('{');
626            for (i, key) in keys.iter().enumerate() {
627                if i > 0 {
628                    out.push(',');
629                }
630                let encoded_key = serde_json::to_string(key)
631                    .map_err(|e| crate::error::LogError::SerializationError(e.to_string()))?;
632                out.push_str(&encoded_key);
633                out.push(':');
634                let value = map.get(*key).ok_or_else(|| {
635                    crate::error::LogError::SerializationError(
636                        "Missing canonical JSON key".to_string(),
637                    )
638                })?;
639                write_canonical_json(value, out)?;
640            }
641            out.push('}');
642        }
643    }
644    Ok(())
645}
646
647fn merkle_leaf_hash(input: &str) -> Vec<u8> {
648    let bytes = hex_decode(input).unwrap_or_else(|_| input.as_bytes().to_vec());
649    let mut hasher = sha2::Sha256::new();
650    hasher.update([0x00]);
651    hasher.update(&bytes);
652    hasher.finalize().to_vec()
653}
654
/// Lowercase hex encoding of `data`.
fn hex_encode(data: &[u8]) -> String {
    use std::fmt::Write as _;
    let mut out = String::with_capacity(data.len() * 2);
    for byte in data {
        // Writing into a String is infallible.
        let _ = write!(out, "{byte:02x}");
    }
    out
}
658
/// Decode a hex string (upper- or lowercase) into bytes.
///
/// Returns an error — rather than panicking — on odd-length input and on any
/// non-hex character. The previous implementation sliced the string by byte
/// index (`&s[i..i + 2]`), which panics when a multi-byte UTF-8 character
/// straddles a two-byte boundary; iterating over raw bytes avoids that.
fn hex_decode(s: &str) -> Result<Vec<u8>, String> {
    if !s.len().is_multiple_of(2) {
        return Err("Invalid hex length".to_string());
    }
    s.as_bytes()
        .chunks_exact(2)
        .map(|pair| {
            let hi = (pair[0] as char).to_digit(16);
            let lo = (pair[1] as char).to_digit(16);
            match (hi, lo) {
                (Some(hi), Some(lo)) => Ok(((hi << 4) | lo) as u8),
                _ => Err("Invalid hex".to_string()),
            }
        })
        .collect()
}
668
669fn build_rfc3161_timestamp_query(
670    message_digest: &[u8],
671    nonce_text: &str,
672) -> Result<Vec<u8>, TsaError> {
673    // We support SHA-256 only in this implementation path.
674    if message_digest.len() != 32 {
675        return Err(TsaError::Encoding(format!(
676            "expected SHA-256 digest (32 bytes), got {}",
677            message_digest.len()
678        )));
679    }
680
681    let nonce_hash = sha2::Sha256::digest(nonce_text.as_bytes());
682    let nonce = der_integer_positive(&nonce_hash[..16]);
683
684    let algorithm_identifier = der_sequence(&[
685        der_oid(&[2, 16, 840, 1, 101, 3, 4, 2, 1]), // sha256
686        der_null(),
687    ]);
688    let message_imprint = der_sequence(&[algorithm_identifier, der_octet_string(message_digest)]);
689
690    Ok(der_sequence(&[
691        der_integer_u64(1), // version v1
692        message_imprint,
693        nonce,             // nonce
694        der_boolean(true), // certReq = TRUE
695    ]))
696}
697
/// Minimal decoded view of an RFC 3161 TimeStampResp.
struct ParsedTsaResponse {
    // PKIStatus value; callers accept 0 and 1 (granted / grantedWithMods).
    status: i64,
    // Raw DER bytes of the timeStampToken TLV, when the response carried one.
    time_stamp_token_der: Option<Vec<u8>>,
}
702
/// Parse the outer layers of an RFC 3161 TimeStampResp.
///
/// Expected shape: SEQUENCE { PKIStatusInfo SEQUENCE { status INTEGER, ... },
/// timeStampToken? }. Only the leading status INTEGER and the raw token TLV
/// are extracted; the rest of PKIStatusInfo (statusString, failInfo) is
/// skipped. Returns `TsaError::InvalidResponse` on any structural mismatch.
fn parse_timestamp_response(bytes: &[u8]) -> Result<ParsedTsaResponse, TsaError> {
    // Outer TimeStampResp SEQUENCE (tag 0x30).
    let (outer_tag, outer_len, outer_hdr) = der_read_tlv(bytes, 0)?;
    if outer_tag != 0x30 || outer_hdr + outer_len > bytes.len() {
        return Err(TsaError::InvalidResponse);
    }
    let outer = &bytes[outer_hdr..outer_hdr + outer_len];

    // First child: PKIStatusInfo SEQUENCE.
    let (status_tag, status_len, status_hdr) = der_read_tlv(outer, 0)?;
    if status_tag != 0x30 || status_hdr + status_len > outer.len() {
        return Err(TsaError::InvalidResponse);
    }
    let status_seq = &outer[status_hdr..status_hdr + status_len];
    // Its first child must be the status INTEGER (tag 0x02).
    let (int_tag, int_len, int_hdr) = der_read_tlv(status_seq, 0)?;
    if int_tag != 0x02 || int_hdr + int_len > status_seq.len() {
        return Err(TsaError::InvalidResponse);
    }
    let status = der_parse_integer_i64(&status_seq[int_hdr..int_hdr + int_len])?;

    // Anything after PKIStatusInfo is taken to be the timeStampToken TLV,
    // kept as raw DER (header included) for later decoding/verification.
    let next = status_hdr + status_len;
    let time_stamp_token_der = if next < outer.len() {
        let (_tag, len, hdr) = der_read_tlv(outer, next)?;
        Some(outer[next..next + hdr + len].to_vec())
    } else {
        None
    };

    Ok(ParsedTsaResponse {
        status,
        time_stamp_token_der,
    })
}
734
/// Scan DER bytes for the first GeneralizedTime (tag 0x18) value ending in
/// 'Z' and convert it to an RFC 3339 string.
///
/// This is a heuristic byte scan, not a structural parse: a 0x18 byte inside
/// other content can be misread as a tag (the scan bails to `None` in that
/// case), and fractional-second forms (`YYYYMMDDHHMMSS.fffZ`) are not matched.
fn extract_generalized_time_rfc3339(bytes: &[u8]) -> Option<String> {
    let mut i = 0usize;
    while i + 2 <= bytes.len() {
        if bytes[i] == 0x18 {
            // Treat this byte as a TLV tag and try to read its value.
            let (tag, len, hdr) = der_read_tlv(bytes, i).ok()?;
            if tag != 0x18 || i + hdr + len > bytes.len() {
                return None;
            }
            let s = std::str::from_utf8(&bytes[i + hdr..i + hdr + len]).ok()?;
            // Only the plain `YYYYMMDDHHMMSSZ` (UTC) form is accepted.
            if let Some(trimmed) = s.strip_suffix('Z') {
                if let Ok(naive) = chrono::NaiveDateTime::parse_from_str(trimmed, "%Y%m%d%H%M%S") {
                    let dt = chrono::DateTime::<Utc>::from_naive_utc_and_offset(naive, Utc);
                    return Some(dt.to_rfc3339());
                }
            }
        }
        i += 1;
    }
    None
}
755
756fn parse_http_date_to_rfc3339(value: String) -> Option<String> {
757    let dt = chrono::DateTime::parse_from_rfc2822(&value).ok()?;
758    Some(dt.with_timezone(&Utc).to_rfc3339())
759}
760
761fn der_read_tlv(input: &[u8], offset: usize) -> Result<(u8, usize, usize), TsaError> {
762    if offset + 2 > input.len() {
763        return Err(TsaError::InvalidResponse);
764    }
765    let tag = input[offset];
766    let first_len = input[offset + 1];
767    if first_len & 0x80 == 0 {
768        let len = first_len as usize;
769        Ok((tag, len, 2))
770    } else {
771        let n = (first_len & 0x7f) as usize;
772        if n == 0 || n > 4 || offset + 2 + n > input.len() {
773            return Err(TsaError::InvalidResponse);
774        }
775        let mut len = 0usize;
776        for b in &input[offset + 2..offset + 2 + n] {
777            len = (len << 8) | (*b as usize);
778        }
779        Ok((tag, len, 2 + n))
780    }
781}
782
783fn der_parse_integer_i64(bytes: &[u8]) -> Result<i64, TsaError> {
784    if bytes.is_empty() || bytes.len() > 8 {
785        return Err(TsaError::InvalidResponse);
786    }
787    let mut v: i64 = 0;
788    for b in bytes {
789        v = (v << 8) | (*b as i64);
790    }
791    Ok(v)
792}
793
/// Encode a DER length field: short form below 128, long form otherwise.
fn der_len(len: usize) -> Vec<u8> {
    if len < 0x80 {
        return vec![len as u8];
    }
    // Long form: 0x80 | byte-count, followed by big-endian length bytes
    // with leading zeros stripped.
    let be = len.to_be_bytes();
    let first_nonzero = be
        .iter()
        .position(|&b| b != 0)
        .unwrap_or(be.len() - 1); // unreachable for len >= 128, but safe
    let mut encoded = Vec::with_capacity(1 + be.len() - first_nonzero);
    encoded.push(0x80 | (be.len() - first_nonzero) as u8);
    encoded.extend_from_slice(&be[first_nonzero..]);
    encoded
}
809
810fn der_wrap(tag: u8, value: &[u8]) -> Vec<u8> {
811    let mut out = vec![tag];
812    out.extend(der_len(value.len()));
813    out.extend(value);
814    out
815}
816
817fn der_sequence(parts: &[Vec<u8>]) -> Vec<u8> {
818    let mut content = Vec::new();
819    for part in parts {
820        content.extend(part);
821    }
822    der_wrap(0x30, &content)
823}
824
/// DER NULL: tag 0x05 with zero-length content.
fn der_null() -> Vec<u8> {
    [0x05, 0x00].to_vec()
}
828
/// DER BOOLEAN: tag 0x01, length 1, content 0xff for TRUE / 0x00 for FALSE.
fn der_boolean(v: bool) -> Vec<u8> {
    let content = if v { 0xff } else { 0x00 };
    vec![0x01, 0x01, content]
}
832
833fn der_integer_u64(v: u64) -> Vec<u8> {
834    let mut bytes = if v == 0 {
835        vec![0]
836    } else {
837        let mut tmp = Vec::new();
838        let mut n = v;
839        while n > 0 {
840            tmp.push((n & 0xff) as u8);
841            n >>= 8;
842        }
843        tmp.reverse();
844        tmp
845    };
846    if bytes[0] & 0x80 != 0 {
847        bytes.insert(0, 0);
848    }
849    der_wrap(0x02, &bytes)
850}
851
852fn der_integer_positive(bytes: &[u8]) -> Vec<u8> {
853    let mut v = bytes.to_vec();
854    while v.first() == Some(&0) && v.len() > 1 {
855        v.remove(0);
856    }
857    if v.first().map(|b| b & 0x80 != 0).unwrap_or(false) {
858        v.insert(0, 0);
859    }
860    der_wrap(0x02, &v)
861}
862
/// Encode `bytes` as a DER OCTET STRING (tag 0x04).
fn der_octet_string(bytes: &[u8]) -> Vec<u8> {
    der_wrap(0x04, bytes)
}
866
867fn der_oid(oid: &[u32]) -> Vec<u8> {
868    let mut out = Vec::new();
869    if oid.len() < 2 {
870        return der_wrap(0x06, &out);
871    }
872    out.push((oid[0] * 40 + oid[1]) as u8);
873    for &arc in &oid[2..] {
874        let mut stack = [0u8; 5];
875        let mut idx = stack.len();
876        let mut n = arc;
877        stack[idx - 1] = (n & 0x7f) as u8;
878        idx -= 1;
879        n >>= 7;
880        while n > 0 {
881            stack[idx - 1] = 0x80 | ((n & 0x7f) as u8);
882            idx -= 1;
883            n >>= 7;
884        }
885        out.extend(&stack[idx..]);
886    }
887    der_wrap(0x06, &out)
888}
889
#[cfg(test)]
mod tests {
    //! Unit tests covering: daily publication creation and day-to-day root
    //! chaining, signing, mock/real TSA timestamping, RFC 3161 query/response
    //! DER helpers, canonical JSON export (plain + gzip), stable file
    //! basenames, root-hash tamper detection, TSA token inspection, CMS
    //! verification error paths (feature-gated), and filesystem publishing.
    use super::*;
    use base64::{engine::general_purpose::STANDARD, Engine as _};
    use std::io::Read;
    use tempfile::tempdir;

    // First day chains from the all-zero genesis root; subsequent days chain
    // from the previous day's root hash. Signing attaches an RSA-PSS-SHA256
    // signature record with the raw bytes base64-encoded.
    #[test]
    fn test_daily_publication_and_signature_chain() {
        let mut service = PublicationService::new();
        let hourly_roots = vec!["a".repeat(64), "b".repeat(64)];

        let mut day1 = service.create_daily_publication(&hourly_roots, 42);
        assert_eq!(day1.entry_count, 42);
        assert_eq!(day1.hourly_roots.len(), 2);
        // Genesis: previous root is 64 zero hex chars.
        assert_eq!(day1.previous_day_root, "0".repeat(64));
        assert!(day1.signature.is_none());

        service.sign_publication(&mut day1, b"sig");
        let sig = day1.signature.as_ref().expect("signature set");
        assert_eq!(sig.algorithm, "RSA-PSS-SHA256");
        assert_eq!(sig.value, STANDARD.encode(b"sig"));

        // Day 2 must chain from day 1's root.
        let day2 = service.create_daily_publication(&hourly_roots, 1);
        assert_eq!(day2.previous_day_root, day1.root_hash);
    }

    // The mock:// scheme short-circuits to a local token of the form
    // "mock-sha256=..." without performing any network I/O.
    #[test]
    fn test_add_tsa_timestamp_mock_only() {
        let mut service = PublicationService::new();
        let hourly_roots = vec!["c".repeat(64)];
        let mut publication = service.create_daily_publication(&hourly_roots, 1);

        let rt = tokio::runtime::Builder::new_current_thread()
            .build()
            .expect("runtime");

        rt.block_on(async {
            service
                .add_tsa_timestamp(&mut publication, "mock://tsa")
                .await
                .expect("mock TSA works");
        });

        let tsa = publication
            .tsa_timestamp
            .as_ref()
            .expect("tsa timestamp set");
        assert_eq!(tsa.tsa_url, "mock://tsa");
        assert!(tsa.token.starts_with("mock-sha256="));
    }

    // A non-mock URL triggers a real request against a placeholder host,
    // which must fail — and failure must leave the publication untouched.
    #[test]
    fn test_add_tsa_timestamp_rejects_non_mock() {
        let mut service = PublicationService::new();
        let hourly_roots = vec!["d".repeat(64)];
        let mut publication = service.create_daily_publication(&hourly_roots, 1);

        // NOTE(review): this runtime is built without enable_io(); if
        // add_tsa_timestamp performs tokio-driven network I/O here, the
        // failure mode may differ from a plain connection error — confirm
        // the error path does not rely on an I/O driver being present.
        let rt = tokio::runtime::Builder::new_current_thread()
            .build()
            .expect("runtime");

        let err = rt.block_on(async {
            service
                .add_tsa_timestamp(&mut publication, "https://tsa.example")
                .await
                .expect_err("network call should fail for placeholder endpoint")
        });

        match err {
            TsaError::Server(_) | TsaError::Network(_) => {}
            other => panic!("unexpected error: {other}"),
        }
        // Failure must not leave a partial timestamp behind.
        assert!(publication.tsa_timestamp.is_none());
    }

    // The RFC 3161 TimeStampReq must embed the id-sha256 algorithm OID.
    #[test]
    fn test_build_rfc3161_query_der_contains_sha256_oid() {
        let digest = [0x11u8; 32];
        let req = build_rfc3161_timestamp_query(&digest, "nonce").expect("query");
        // sha256 OID bytes: 06 09 60 86 48 01 65 03 04 02 01
        let oid = [
            0x06, 0x09, 0x60, 0x86, 0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x01,
        ];
        // Scan the DER for the OID TLV as a contiguous byte run.
        assert!(req.windows(oid.len()).any(|w| w == oid));
    }

    // A minimal response carrying only PKIStatusInfo (status = granted/0)
    // parses with no token attached.
    #[test]
    fn test_parse_timestamp_response_status_only() {
        // TimeStampResp ::= SEQUENCE { status PKIStatusInfo }
        let resp = [0x30, 0x05, 0x30, 0x03, 0x02, 0x01, 0x00];
        let parsed = parse_timestamp_response(&resp).expect("parse");
        assert_eq!(parsed.status, 0);
        assert!(parsed.time_stamp_token_der.is_none());
    }

    // GeneralizedTime (tag 0x18) embedded anywhere in the DER should be
    // recoverable as an RFC 3339 timestamp.
    #[test]
    fn test_extract_generalized_time_best_effort() {
        // DER GeneralizedTime: "20260226083045Z"
        let mut bytes = vec![0x18, 0x0f];
        bytes.extend_from_slice(b"20260226083045Z");
        let ts = extract_generalized_time_rfc3339(&bytes).expect("timestamp");
        assert!(ts.starts_with("2026-02-26T08:30:45"));
    }

    // Canonical JSON must be byte-stable across calls, compact (no
    // newlines), and include the expected fields.
    #[test]
    fn test_canonical_json_export_is_deterministic() {
        let service = PublicationService::new();
        let publication = service.create_daily_publication(&["e".repeat(64)], 7);

        let json1 = publication.to_canonical_json().expect("json1");
        let json2 = publication.to_canonical_json().expect("json2");

        assert_eq!(json1, json2);
        assert!(!json1.contains('\n'));
        assert!(json1.contains("\"entry_count\":7"));
        assert!(json1.contains("\"hourly_roots\""));
    }

    // Gzip output must decompress back to exactly the canonical JSON bytes.
    #[test]
    fn test_canonical_json_gzip_roundtrip() {
        let service = PublicationService::new();
        let publication = service.create_daily_publication(&["f".repeat(64)], 3);

        let original = publication.to_canonical_json_bytes().expect("original");
        let compressed = publication.to_canonical_json_gzip().expect("gzip");
        assert!(!compressed.is_empty());

        let mut decoder = flate2::read::GzDecoder::new(compressed.as_slice());
        let mut decompressed = Vec::new();
        decoder.read_to_end(&mut decompressed).expect("decompress");

        assert_eq!(decompressed, original);
    }

    // Basename format: "daily-publication-<date>-<first 16 hex of root>".
    #[test]
    fn test_publication_basename_is_stable() {
        let service = PublicationService::new();
        let publication = service.create_daily_publication(&["bb".repeat(32)], 1);
        let base = publication.publication_basename();

        assert!(base.starts_with("daily-publication-"));
        assert!(base.contains(&publication.date));
        assert!(base.ends_with(&publication.root_hash[..16]));
    }

    // Mutating hourly_roots after creation must invalidate the stored root.
    #[test]
    fn test_verify_root_hash_detects_tamper() {
        let service = PublicationService::new();
        let mut publication =
            service.create_daily_publication(&["aa".repeat(32), "bb".repeat(32)], 2);
        assert!(publication.verify_root_hash());

        publication.hourly_roots.push("cc".repeat(32));
        assert!(!publication.verify_root_hash());
    }

    // inspect_token reports presence, base64 validity, non-empty DER, and a
    // best-effort extracted GeneralizedTime; invalid base64 is flagged
    // without panicking.
    #[test]
    fn test_tsa_token_inspection() {
        let tsa = TsaTimestamp {
            tsa_url: "https://tsa.example".to_string(),
            timestamp: "2026-02-26T00:00:00Z".to_string(),
            token: base64_encode(&[
                0x18, 0x0f, b'2', b'0', b'2', b'6', b'0', b'2', b'2', b'6', b'0', b'8', b'3', b'0',
                b'4', b'5', b'Z',
            ]),
        };
        let inspected = tsa.inspect_token();
        assert!(inspected.token_present);
        assert!(inspected.token_base64_valid);
        assert!(inspected.token_der_nonempty);
        assert!(inspected.extracted_timestamp.is_some());

        let bad = TsaTimestamp {
            tsa_url: "https://tsa.example".to_string(),
            timestamp: "2026-02-26T00:00:00Z".to_string(),
            token: "%%%".to_string(),
        };
        let bad_inspected = bad.inspect_token();
        assert!(bad_inspected.token_present);
        assert!(!bad_inspected.token_base64_valid);
    }

    // With the OpenSSL CMS backend, invalid base64 must surface as a
    // TokenBase64 error before any PKCS#7 parsing happens.
    #[cfg(feature = "tsa-cms-openssl")]
    #[test]
    fn test_tsa_cms_verify_rejects_invalid_base64() {
        let tsa = TsaTimestamp {
            tsa_url: "https://tsa.example".to_string(),
            timestamp: "2026-02-26T00:00:00Z".to_string(),
            token: "%%%".to_string(),
        };

        let err = tsa
            .verify_cms_signature_with_pem_roots(b"")
            .expect_err("invalid base64 must fail");
        match err {
            TsaCmsVerifyError::TokenBase64(_) => {}
            other => panic!("unexpected error: {other}"),
        }
    }

    // Valid base64 wrapping non-PKCS#7 DER must fail at parse or trust-store
    // setup — either error is acceptable across OpenSSL versions.
    #[cfg(feature = "tsa-cms-openssl")]
    #[test]
    fn test_tsa_cms_verify_rejects_non_pkcs7_der() {
        let tsa = TsaTimestamp {
            tsa_url: "https://tsa.example".to_string(),
            timestamp: "2026-02-26T00:00:00Z".to_string(),
            token: base64_encode(&[0x30, 0x03, 0x02, 0x01, 0x00]),
        };

        let err = tsa
            .verify_cms_signature_with_pem_roots(b"")
            .expect_err("non-pkcs7 der must fail");
        match err {
            TsaCmsVerifyError::Pkcs7Parse(_) | TsaCmsVerifyError::TrustStore(_) => {}
            other => panic!("unexpected error: {other}"),
        }
    }

    // Without the OpenSSL feature, verification must report the backend as
    // unavailable before inspecting the token at all.
    #[cfg(not(feature = "tsa-cms-openssl"))]
    #[test]
    fn test_tsa_cms_verify_reports_backend_unavailable_without_feature() {
        let tsa = TsaTimestamp {
            tsa_url: "https://tsa.example".to_string(),
            timestamp: "2026-02-26T00:00:00Z".to_string(),
            token: "%%%".to_string(),
        };

        let err = tsa
            .verify_cms_signature_with_pem_roots(b"")
            .expect_err("backend should be unavailable without feature");
        match err {
            TsaCmsVerifyError::BackendUnavailable(_) => {}
            other => panic!("unexpected error: {other}"),
        }
    }

    // Publishing writes the canonical JSON and (when requested) a gzip
    // sibling whose contents decompress to the same bytes.
    #[test]
    fn test_publish_to_filesystem_writes_json_and_gzip() {
        let tmp = tempdir().expect("tempdir");
        let service = PublicationService::new();
        let publication = service.create_daily_publication(&["aa".repeat(32)], 11);

        let written = service
            .publish_to_filesystem(&publication, tmp.path(), true)
            .expect("publish");

        assert!(written.json_path.exists());
        let gzip_path = written.gzip_path.as_ref().expect("gzip path");
        assert!(gzip_path.exists());

        // On-disk JSON must be byte-identical to the canonical export.
        let json_bytes = std::fs::read(&written.json_path).expect("json bytes");
        assert_eq!(
            json_bytes,
            publication
                .to_canonical_json_bytes()
                .expect("canonical json")
        );

        let gz_bytes = std::fs::read(gzip_path).expect("gzip bytes");
        let mut decoder = flate2::read::GzDecoder::new(gz_bytes.as_slice());
        let mut out = Vec::new();
        decoder.read_to_end(&mut out).expect("decompress");
        assert_eq!(out, json_bytes);
    }
}