// immutable_logging/publication.rs
1//! Publication - Daily audit publication
2
3use serde::{Deserialize, Serialize};
4use chrono::Utc;
5use std::io::Write;
6use sha2::Digest as _;
7use std::path::{Path, PathBuf};
8
/// Daily audit publication: the per-day anchor of the audit log chain.
///
/// Holds the Merkle root over all hourly roots for one UTC day, links back to
/// the previous day's root, and optionally carries a detached signature and an
/// RFC 3161 TSA timestamp that are attached after creation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DailyPublication {
    /// Publication date (UTC, `YYYY-MM-DD`).
    pub date: String,
    /// Merkle root (hex) computed over `hourly_roots`.
    pub root_hash: String,
    /// Total entry count covered by this day.
    pub entry_count: u64,
    /// Hourly root hashes (hex), in the order they were supplied.
    pub hourly_roots: Vec<String>,
    /// Previous day's root hash; an all-zero sentinel for the first day in a chain.
    pub previous_day_root: String,
    /// Creation timestamp (RFC 3339).
    pub created_at: String,
    /// Detached signature, present once the publication has been signed.
    pub signature: Option<PublicationSignature>,
    /// TSA timestamp, present once one has been obtained.
    pub tsa_timestamp: Option<TsaTimestamp>,
}
29
30impl DailyPublication {
31    /// Export as compact deterministic JSON bytes (struct-field-order based).
32    pub fn to_canonical_json_bytes(&self) -> Result<Vec<u8>, crate::error::LogError> {
33        serde_json::to_vec(self)
34            .map_err(|e| crate::error::LogError::SerializationError(e.to_string()))
35    }
36
37    /// Export as compact deterministic JSON string.
38    pub fn to_canonical_json(&self) -> Result<String, crate::error::LogError> {
39        let bytes = self.to_canonical_json_bytes()?;
40        String::from_utf8(bytes)
41            .map_err(|e| crate::error::LogError::SerializationError(e.to_string()))
42    }
43
44    /// Export as gzip-compressed canonical JSON.
45    pub fn to_canonical_json_gzip(&self) -> Result<Vec<u8>, crate::error::LogError> {
46        let json = self.to_canonical_json_bytes()?;
47        let mut encoder =
48            flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::default());
49        encoder
50            .write_all(&json)
51            .map_err(|e| crate::error::LogError::SerializationError(e.to_string()))?;
52        encoder
53            .finish()
54            .map_err(|e| crate::error::LogError::SerializationError(e.to_string()))
55    }
56
57    /// Build a deterministic basename suitable for filesystem/object publication backends.
58    pub fn publication_basename(&self) -> String {
59        let root_prefix = self.root_hash.get(..16).unwrap_or(&self.root_hash);
60        format!("daily-publication-{}-{}", self.date, root_prefix)
61    }
62
63    /// Recompute the publication root from `hourly_roots`.
64    pub fn recompute_root_hash(&self) -> String {
65        PublicationService::compute_merkle_root(&self.hourly_roots)
66    }
67
68    /// Check whether the stored `root_hash` matches the recomputed value.
69    pub fn verify_root_hash(&self) -> bool {
70        self.root_hash == self.recompute_root_hash()
71    }
72
73    /// Write canonical JSON to a file path.
74    pub fn write_canonical_json_file<P: AsRef<Path>>(
75        &self,
76        path: P,
77    ) -> Result<(), crate::error::LogError> {
78        let bytes = self.to_canonical_json_bytes()?;
79        std::fs::write(path, bytes)
80            .map_err(|e| crate::error::LogError::PublicationError(e.to_string()))
81    }
82
83    /// Write gzip-compressed canonical JSON to a file path.
84    pub fn write_canonical_json_gzip_file<P: AsRef<Path>>(
85        &self,
86        path: P,
87    ) -> Result<(), crate::error::LogError> {
88        let bytes = self.to_canonical_json_gzip()?;
89        std::fs::write(path, bytes)
90            .map_err(|e| crate::error::LogError::PublicationError(e.to_string()))
91    }
92}
93
/// Detached signature metadata attached to a [`DailyPublication`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PublicationSignature {
    /// Signature algorithm label (e.g. "RSA-PSS-SHA256").
    pub algorithm: String,
    /// Identifier of the signing key.
    pub key_id: String,
    /// Standard base64 encoding of the raw signature bytes.
    pub value: String,
}
101
/// RFC 3161 TSA timestamp metadata stored alongside a publication.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TsaTimestamp {
    /// TSA endpoint the token came from (`mock://` or `http(s)://`).
    pub tsa_url: String,
    /// Best-effort timestamp extracted from the token (RFC 3339).
    pub timestamp: String,
    /// Base64-encoded DER timeStampToken, or a `mock-sha256=...` marker for mock URLs.
    pub token: String,
}
109
/// Best-effort inspection result for a stored TSA token.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TsaTokenInspection {
    /// Whether any token string is stored at all (non-empty).
    pub token_present: bool,
    /// Whether the stored token decoded as standard base64.
    pub token_base64_valid: bool,
    /// Whether the decoded DER payload is non-empty.
    pub token_der_nonempty: bool,
    /// GeneralizedTime found by scanning the DER, if any (RFC 3339).
    pub extracted_timestamp: Option<String>,
}
118
/// Cryptographic CMS/PKCS#7 verification result for a TSA token.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TsaCmsVerification {
    /// True when the CMS signature verified against the supplied trust store.
    pub verified: bool,
    /// GeneralizedTime found by scanning the DER, if any (RFC 3339).
    pub extracted_timestamp: Option<String>,
}
125
/// TSA token CMS verification error.
#[derive(Debug, thiserror::Error)]
pub enum TsaCmsVerifyError {
    /// Crate compiled without the `tsa-cms-openssl` feature.
    #[error("TSA CMS verification backend unavailable: {0}")]
    BackendUnavailable(String),
    /// No token stored on the [`TsaTimestamp`].
    #[error("TSA token missing")]
    TokenMissing,
    /// Stored token was not valid base64.
    #[error("TSA token base64 decode failed: {0}")]
    TokenBase64(String),
    /// Decoded DER was not a parseable PKCS#7 structure.
    #[error("TSA token PKCS#7 parse failed: {0}")]
    Pkcs7Parse(String),
    /// Trust store PEM bundle could not be loaded.
    #[error("TSA trust store error: {0}")]
    TrustStore(String),
    /// CMS signature verification itself failed.
    #[error("TSA CMS verification failed: {0}")]
    Verify(String),
}
142
/// Publication service: builds, signs, and publishes daily audit publications.
pub struct PublicationService {
    /// Root hash of the most recently signed publication; `None` until the
    /// first day is signed. Used to chain consecutive days together.
    previous_day_root: Option<String>,
}
148
149impl PublicationService {
150    /// Create new publication service
151    pub fn new() -> Self {
152        PublicationService {
153            previous_day_root: None,
154        }
155    }
156    
157    /// Create daily publication
158    pub fn create_daily_publication(
159        &self,
160        hourly_roots: &[String],
161        entry_count: u64,
162    ) -> DailyPublication {
163        let date = Utc::now().format("%Y-%m-%d").to_string();
164        let previous = self.previous_day_root.clone().unwrap_or_else(|| {
165            "0000000000000000000000000000000000000000000000000000000000000000".to_string()
166        });
167        
168        // Compute root hash of all hourly roots
169        let root_hash = Self::compute_merkle_root(hourly_roots);
170        
171        DailyPublication {
172            date,
173            root_hash,
174            entry_count,
175            hourly_roots: hourly_roots.to_vec(),
176            previous_day_root: previous,
177            created_at: Utc::now().to_rfc3339(),
178            signature: None,
179            tsa_timestamp: None,
180        }
181    }
182    
183    /// Compute merkle root from list of hashes
184    fn compute_merkle_root(hashes: &[String]) -> String {
185        if hashes.is_empty() {
186            return "0000000000000000000000000000000000000000000000000000000000000000".to_string();
187        }
188        
189        use sha2::{Sha256, Digest};
190        
191        let mut current: Vec<String> = hashes.to_vec();
192        
193        while current.len() > 1 {
194            let mut next = Vec::new();
195            
196            for chunk in current.chunks(2) {
197                if chunk.len() == 2 {
198                    let mut hasher = Sha256::new();
199                    hasher.update(chunk[0].as_bytes());
200                    hasher.update(chunk[1].as_bytes());
201                    next.push(format!("{:x}", hasher.finalize()));
202                } else {
203                    next.push(chunk[0].clone());
204                }
205            }
206            
207            current = next;
208        }
209        
210        current[0].clone()
211    }
212    
213    /// Sign publication
214    pub fn sign_publication(&mut self, publication: &mut DailyPublication, signature: &[u8]) {
215        self.sign_publication_with_metadata(
216            publication,
217            signature,
218            "RSA-PSS-SHA256",
219            "rnbc-audit-sig-2026",
220        );
221    }
222
223    /// Sign publication with explicit metadata (useful for API-driven integrations).
224    pub fn sign_publication_with_metadata(
225        &mut self,
226        publication: &mut DailyPublication,
227        signature: &[u8],
228        algorithm: &str,
229        key_id: &str,
230    ) {
231        publication.signature = Some(PublicationSignature {
232            algorithm: algorithm.to_string(),
233            key_id: key_id.to_string(),
234            value: base64_encode(signature),
235        });
236        
237        // Store previous day root for chaining
238        self.previous_day_root = Some(publication.root_hash.clone());
239    }
240
241    /// Publish to a local filesystem directory (precursor to WORM/object storage backends).
242    pub fn publish_to_filesystem<P: AsRef<Path>>(
243        &self,
244        publication: &DailyPublication,
245        directory: P,
246        write_gzip: bool,
247    ) -> Result<FilesystemPublication, crate::error::LogError> {
248        let dir = directory.as_ref();
249        std::fs::create_dir_all(dir)
250            .map_err(|e| crate::error::LogError::PublicationError(e.to_string()))?;
251
252        let basename = publication.publication_basename();
253        let json_path = dir.join(format!("{basename}.json"));
254        publication.write_canonical_json_file(&json_path)?;
255
256        let gzip_path = if write_gzip {
257            let path = dir.join(format!("{basename}.json.gz"));
258            publication.write_canonical_json_gzip_file(&path)?;
259            Some(path)
260        } else {
261            None
262        };
263
264        Ok(FilesystemPublication { json_path, gzip_path })
265    }
266     
267    /// Add TSA timestamp metadata.
268    ///
269    /// `mock://` URLs are supported for local testing.
270    ///
271    /// `http(s)://` URLs use an experimental RFC 3161 request path that retrieves
272    /// and stores the TSA token, but does not yet perform full CMS/token validation.
273    pub async fn add_tsa_timestamp(
274        &mut self,
275        publication: &mut DailyPublication,
276        tsa_url: &str,
277    ) -> Result<(), TsaError> {
278        // Serialize publication hash for TSA request
279        let hash_to_timestamp = &publication.root_hash;
280        
281        // In production, this would be a proper RFC 3161 request
282        // For now, we'll implement a basic timestamp request structure
283        let timestamp_request = TsaRequest {
284            hash: hash_to_timestamp.clone(),
285            algorithm: "SHA256".to_string(),
286            nonce: uuid::Uuid::new_v4().to_string(),
287        };
288        
289        // Make request to TSA (in production, use actual TSA server)
290        let response = self.request_timestamp(&tsa_url, &timestamp_request).await?;
291        
292        publication.tsa_timestamp = Some(TsaTimestamp {
293            tsa_url: tsa_url.to_string(),
294            timestamp: response.timestamp,
295            token: response.token,
296        });
297        
298        tracing::info!(
299            "TSA timestamp added for publication {} at {}",
300            publication.date,
301            publication
302                .tsa_timestamp
303                .as_ref()
304                .map(|t| t.timestamp.as_str())
305                .map_or("unknown", |v| v)
306        );
307        
308        Ok(())
309    }
310    
311    /// Request timestamp from TSA server.
312    ///
313    /// Supports:
314    /// - `mock://...` for tests
315    /// - `http(s)://...` experimental RFC 3161 transport (token retrieval only)
316    async fn request_timestamp(&self, tsa_url: &str, request: &TsaRequest) -> Result<TsaResponse, TsaError> {
317        if tsa_url.starts_with("mock://") {
318            tracing::warn!("Using mock TSA timestamp provider: {}", tsa_url);
319            return Ok(TsaResponse {
320                timestamp: chrono::Utc::now().to_rfc3339(),
321                token: format!("mock-sha256={}", request.hash),
322                tsa_certificate: "placeholder".to_string(),
323            });
324        }
325
326        if !(tsa_url.starts_with("https://") || tsa_url.starts_with("http://")) {
327            return Err(TsaError::UnsupportedScheme(tsa_url.to_string()));
328        }
329
330        let digest_bytes = hex_decode(&request.hash).map_err(TsaError::Encoding)?;
331        let body = build_rfc3161_timestamp_query(&digest_bytes, &request.nonce)?;
332
333        tracing::info!("Requesting TSA token from {}", tsa_url);
334        let client = reqwest::Client::new();
335        let resp = client
336            .post(tsa_url)
337            .header("Content-Type", "application/timestamp-query")
338            .header("Accept", "application/timestamp-reply")
339            .body(body)
340            .send()
341            .await?;
342
343        let status_code = resp.status();
344        if !status_code.is_success() {
345            return Err(TsaError::Server(format!(
346                "HTTP {} from TSA endpoint",
347                status_code
348            )));
349        }
350
351        let date_header = resp
352            .headers()
353            .get(reqwest::header::DATE)
354            .and_then(|v| v.to_str().ok())
355            .map(str::to_string);
356        let bytes = resp.bytes().await?;
357
358        let tsa_reply = parse_timestamp_response(&bytes)?;
359        if tsa_reply.status != 0 && tsa_reply.status != 1 {
360            return Err(TsaError::Server(format!(
361                "TSA rejected request with status {}",
362                tsa_reply.status
363            )));
364        }
365
366        let token_der = tsa_reply
367            .time_stamp_token_der
368            .ok_or(TsaError::InvalidResponse)?;
369
370        // Best-effort timestamp extraction from token bytes (GeneralizedTime scan).
371        // Full CMS/ESS validation is pending.
372        let timestamp = extract_generalized_time_rfc3339(&token_der)
373            .or_else(|| date_header.and_then(parse_http_date_to_rfc3339))
374            .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
375
376        Ok(TsaResponse {
377            timestamp,
378            token: base64_encode(&token_der),
379            tsa_certificate: "unparsed".to_string(),
380        })
381    }
382}
383
impl TsaTimestamp {
    /// Best-effort validation/inspection of stored TSA token encoding and timestamp extraction.
    ///
    /// This does not perform CMS/PKCS#7 signature validation.
    pub fn inspect_token(&self) -> TsaTokenInspection {
        use base64::{Engine as _, engine::general_purpose::STANDARD};

        // No token stored at all.
        if self.token.is_empty() {
            return TsaTokenInspection {
                token_present: false,
                token_base64_valid: false,
                token_der_nonempty: false,
                extracted_timestamp: None,
            };
        }

        // Token present but not valid standard base64: report and stop.
        let der = match STANDARD.decode(self.token.as_bytes()) {
            Ok(v) => v,
            Err(_) => {
                return TsaTokenInspection {
                    token_present: true,
                    token_base64_valid: false,
                    token_der_nonempty: false,
                    extracted_timestamp: None,
                };
            }
        };

        // Scan the decoded DER for a GeneralizedTime (best effort, no CMS parsing).
        let extracted_timestamp = extract_generalized_time_rfc3339(&der);
        TsaTokenInspection {
            token_present: true,
            token_base64_valid: true,
            token_der_nonempty: !der.is_empty(),
            extracted_timestamp,
        }
    }

    /// Verify the `timeStampToken` CMS/PKCS#7 signature against trusted PEM certificates.
    ///
    /// This validates CMS signature and certificate chain. RFC3161-specific TSTInfo checks
    /// (message imprint, policy, nonce) are not yet enforced here.
    #[cfg(feature = "tsa-cms-openssl")]
    pub fn verify_cms_signature_with_pem_roots(
        &self,
        trust_store_pem: &[u8],
    ) -> Result<TsaCmsVerification, TsaCmsVerifyError> {
        use base64::{Engine as _, engine::general_purpose::STANDARD};
        use openssl::pkcs7::{Pkcs7, Pkcs7Flags};
        use openssl::stack::Stack;
        use openssl::x509::{X509, store::X509StoreBuilder};

        if self.token.is_empty() {
            return Err(TsaCmsVerifyError::TokenMissing);
        }

        // Decode the stored base64 token back to raw DER.
        let der = STANDARD
            .decode(self.token.as_bytes())
            .map_err(|e| TsaCmsVerifyError::TokenBase64(e.to_string()))?;
        // Timestamp extraction is independent of signature verification.
        let extracted_timestamp = extract_generalized_time_rfc3339(&der);

        let pkcs7 = Pkcs7::from_der(&der)
            .map_err(|e| TsaCmsVerifyError::Pkcs7Parse(e.to_string()))?;

        // Build an X509 trust store from the caller-supplied PEM bundle.
        let certs = X509::stack_from_pem(trust_store_pem)
            .map_err(|e| TsaCmsVerifyError::TrustStore(e.to_string()))?;
        let mut store_builder = X509StoreBuilder::new()
            .map_err(|e| TsaCmsVerifyError::TrustStore(e.to_string()))?;
        for cert in certs {
            store_builder
                .add_cert(cert)
                .map_err(|e| TsaCmsVerifyError::TrustStore(e.to_string()))?;
        }
        let store = store_builder.build();

        // NOTE(review): an empty untrusted-cert stack plus empty flags means
        // OpenSSL presumably uses the certificates embedded in the token for
        // chain building — confirm against PKCS7_verify semantics.
        let cert_stack: Stack<X509> =
            Stack::new().map_err(|e| TsaCmsVerifyError::TrustStore(e.to_string()))?;
        let mut out = Vec::<u8>::new();
        pkcs7
            .verify(
                &cert_stack,
                &store,
                None,
                Some(&mut out),
                Pkcs7Flags::empty(),
            )
            .map_err(|e| TsaCmsVerifyError::Verify(e.to_string()))?;

        Ok(TsaCmsVerification {
            verified: true,
            extracted_timestamp,
        })
    }

    /// Stub used when the crate is built without the `tsa-cms-openssl` feature:
    /// always reports the verification backend as unavailable.
    #[cfg(not(feature = "tsa-cms-openssl"))]
    pub fn verify_cms_signature_with_pem_roots(
        &self,
        _trust_store_pem: &[u8],
    ) -> Result<TsaCmsVerification, TsaCmsVerifyError> {
        Err(TsaCmsVerifyError::BackendUnavailable(
            "immutable-logging compiled without feature `tsa-cms-openssl`".to_string(),
        ))
    }
}
487
/// Files created by a filesystem publication backend.
#[derive(Debug, Clone)]
pub struct FilesystemPublication {
    /// Path of the canonical JSON file that was written.
    pub json_path: PathBuf,
    /// Path of the gzip variant, when one was requested.
    pub gzip_path: Option<PathBuf>,
}
494
/// TSA Request structure (RFC 3161 subset)
#[derive(Debug, Clone, Serialize, Deserialize)]
struct TsaRequest {
    /// Hex-encoded digest to be timestamped.
    hash: String,
    /// Digest algorithm label (only "SHA256" is produced by this module).
    algorithm: String,
    /// Per-request nonce text; hashed into the DER nonce for HTTP requests.
    nonce: String,
}
502
/// TSA Response structure (RFC 3161 subset)
#[derive(Debug, Clone, Serialize, Deserialize)]
struct TsaResponse {
    /// Timestamp (RFC 3339), best-effort for HTTP responses.
    timestamp: String,
    /// Base64-encoded DER token (or a mock marker for `mock://` URLs).
    token: String,
    /// Placeholder field; certificate parsing is not implemented yet.
    tsa_certificate: String,
}
510
/// TSA Error type
#[derive(Debug, thiserror::Error)]
pub enum TsaError {
    /// Transport-level failure from reqwest.
    #[error("Network error: {0}")]
    Network(#[from] reqwest::Error),

    /// Hex/DER encoding problem building the request.
    #[error("Encoding error: {0}")]
    Encoding(String),

    /// HTTP-level or PKIStatus-level rejection from the TSA.
    #[error("TSA server error: {0}")]
    Server(String),

    /// URL is neither `mock://` nor `http(s)://`.
    #[error("Unsupported TSA URL scheme: {0}")]
    UnsupportedScheme(String),

    /// Response body was not a parseable TimeStampResp.
    #[error("Invalid response from TSA")]
    InvalidResponse,
}
529
530/// Base64 encode
531fn base64_encode(data: &[u8]) -> String {
532    use base64::{Engine as _, engine::general_purpose::STANDARD};
533    STANDARD.encode(data)
534}
535
/// Decode a hex string into bytes.
///
/// Returns an error for odd lengths or non-hex characters. Works on raw
/// bytes because slicing the `&str` directly can panic when a 2-byte window
/// lands inside a multi-byte UTF-8 character.
fn hex_decode(s: &str) -> Result<Vec<u8>, String> {
    if s.len() % 2 != 0 {
        return Err("Invalid hex length".to_string());
    }
    s.as_bytes()
        .chunks_exact(2)
        .map(|pair| {
            // Non-UTF-8 pairs (split multi-byte chars) and non-hex digits
            // both map to the same error instead of panicking.
            std::str::from_utf8(pair)
                .ok()
                .and_then(|digits| u8::from_str_radix(digits, 16).ok())
                .ok_or_else(|| "Invalid hex".to_string())
        })
        .collect()
}
545
546fn build_rfc3161_timestamp_query(message_digest: &[u8], nonce_text: &str) -> Result<Vec<u8>, TsaError> {
547    // We support SHA-256 only in this implementation path.
548    if message_digest.len() != 32 {
549        return Err(TsaError::Encoding(format!(
550            "expected SHA-256 digest (32 bytes), got {}",
551            message_digest.len()
552        )));
553    }
554
555    let nonce_hash = sha2::Sha256::digest(nonce_text.as_bytes());
556    let nonce = der_integer_positive(&nonce_hash[..16]);
557
558    let algorithm_identifier = der_sequence(&[
559        der_oid(&[2, 16, 840, 1, 101, 3, 4, 2, 1]), // sha256
560        der_null(),
561    ]);
562    let message_imprint = der_sequence(&[
563        algorithm_identifier,
564        der_octet_string(message_digest),
565    ]);
566
567    Ok(der_sequence(&[
568        der_integer_u64(1),      // version v1
569        message_imprint,
570        nonce,                    // nonce
571        der_boolean(true),        // certReq = TRUE
572    ]))
573}
574
/// Minimal parsed view of a DER `TimeStampResp`.
struct ParsedTsaResponse {
    /// PKIStatusInfo.status value (0 = granted, 1 = grantedWithMods).
    status: i64,
    /// Raw DER of the timeStampToken following the status, when present.
    time_stamp_token_der: Option<Vec<u8>>,
}
579
580fn parse_timestamp_response(bytes: &[u8]) -> Result<ParsedTsaResponse, TsaError> {
581    let (outer_tag, outer_len, outer_hdr) = der_read_tlv(bytes, 0)?;
582    if outer_tag != 0x30 || outer_hdr + outer_len > bytes.len() {
583        return Err(TsaError::InvalidResponse);
584    }
585    let outer = &bytes[outer_hdr..outer_hdr + outer_len];
586
587    let (status_tag, status_len, status_hdr) = der_read_tlv(outer, 0)?;
588    if status_tag != 0x30 || status_hdr + status_len > outer.len() {
589        return Err(TsaError::InvalidResponse);
590    }
591    let status_seq = &outer[status_hdr..status_hdr + status_len];
592    let (int_tag, int_len, int_hdr) = der_read_tlv(status_seq, 0)?;
593    if int_tag != 0x02 || int_hdr + int_len > status_seq.len() {
594        return Err(TsaError::InvalidResponse);
595    }
596    let status = der_parse_integer_i64(&status_seq[int_hdr..int_hdr + int_len])?;
597
598    let next = status_hdr + status_len;
599    let time_stamp_token_der = if next < outer.len() {
600        let (_tag, len, hdr) = der_read_tlv(outer, next)?;
601        Some(outer[next..next + hdr + len].to_vec())
602    } else {
603        None
604    };
605
606    Ok(ParsedTsaResponse {
607        status,
608        time_stamp_token_der,
609    })
610}
611
612fn extract_generalized_time_rfc3339(bytes: &[u8]) -> Option<String> {
613    let mut i = 0usize;
614    while i + 2 <= bytes.len() {
615        if bytes[i] == 0x18 {
616            let (tag, len, hdr) = der_read_tlv(bytes, i).ok()?;
617            if tag != 0x18 || i + hdr + len > bytes.len() {
618                return None;
619            }
620            let s = std::str::from_utf8(&bytes[i + hdr..i + hdr + len]).ok()?;
621            if let Some(trimmed) = s.strip_suffix('Z') {
622                if let Ok(naive) = chrono::NaiveDateTime::parse_from_str(trimmed, "%Y%m%d%H%M%S")
623                {
624                    let dt = chrono::DateTime::<Utc>::from_naive_utc_and_offset(naive, Utc);
625                    return Some(dt.to_rfc3339());
626                }
627            }
628        }
629        i += 1;
630    }
631    None
632}
633
634fn parse_http_date_to_rfc3339(value: String) -> Option<String> {
635    let dt = chrono::DateTime::parse_from_rfc2822(&value).ok()?;
636    Some(dt.with_timezone(&Utc).to_rfc3339())
637}
638
639fn der_read_tlv(input: &[u8], offset: usize) -> Result<(u8, usize, usize), TsaError> {
640    if offset + 2 > input.len() {
641        return Err(TsaError::InvalidResponse);
642    }
643    let tag = input[offset];
644    let first_len = input[offset + 1];
645    if first_len & 0x80 == 0 {
646        let len = first_len as usize;
647        Ok((tag, len, 2))
648    } else {
649        let n = (first_len & 0x7f) as usize;
650        if n == 0 || n > 4 || offset + 2 + n > input.len() {
651            return Err(TsaError::InvalidResponse);
652        }
653        let mut len = 0usize;
654        for b in &input[offset + 2..offset + 2 + n] {
655            len = (len << 8) | (*b as usize);
656        }
657        Ok((tag, len, 2 + n))
658    }
659}
660
661fn der_parse_integer_i64(bytes: &[u8]) -> Result<i64, TsaError> {
662    if bytes.is_empty() || bytes.len() > 8 {
663        return Err(TsaError::InvalidResponse);
664    }
665    let mut v: i64 = 0;
666    for b in bytes {
667        v = (v << 8) | (*b as i64);
668    }
669    Ok(v)
670}
671
/// Encode a DER length: short form below 128, long form (0x8N prefix plus
/// big-endian length octets) otherwise.
fn der_len(len: usize) -> Vec<u8> {
    if len < 0x80 {
        return vec![len as u8];
    }
    // Long form: strip leading zero octets from the big-endian encoding.
    let be = len.to_be_bytes();
    let first = be.iter().position(|&b| b != 0).unwrap_or(be.len() - 1);
    let digits = &be[first..];
    let mut out = Vec::with_capacity(1 + digits.len());
    out.push(0x80 | digits.len() as u8);
    out.extend_from_slice(digits);
    out
}
687
688fn der_wrap(tag: u8, value: &[u8]) -> Vec<u8> {
689    let mut out = vec![tag];
690    out.extend(der_len(value.len()));
691    out.extend(value);
692    out
693}
694
695fn der_sequence(parts: &[Vec<u8>]) -> Vec<u8> {
696    let mut content = Vec::new();
697    for part in parts {
698        content.extend(part);
699    }
700    der_wrap(0x30, &content)
701}
702
/// DER NULL (tag 0x05, zero-length content).
fn der_null() -> Vec<u8> {
    [0x05u8, 0x00].to_vec()
}
706
/// DER BOOLEAN (tag 0x01): 0xFF for TRUE, 0x00 for FALSE.
fn der_boolean(v: bool) -> Vec<u8> {
    let content = if v { 0xffu8 } else { 0x00u8 };
    vec![0x01, 0x01, content]
}
710
711fn der_integer_u64(v: u64) -> Vec<u8> {
712    let mut bytes = if v == 0 {
713        vec![0]
714    } else {
715        let mut tmp = Vec::new();
716        let mut n = v;
717        while n > 0 {
718            tmp.push((n & 0xff) as u8);
719            n >>= 8;
720        }
721        tmp.reverse();
722        tmp
723    };
724    if bytes[0] & 0x80 != 0 {
725        bytes.insert(0, 0);
726    }
727    der_wrap(0x02, &bytes)
728}
729
730fn der_integer_positive(bytes: &[u8]) -> Vec<u8> {
731    let mut v = bytes.to_vec();
732    while v.first() == Some(&0) && v.len() > 1 {
733        v.remove(0);
734    }
735    if v.first().map(|b| b & 0x80 != 0).unwrap_or(false) {
736        v.insert(0, 0);
737    }
738    der_wrap(0x02, &v)
739}
740
/// DER OCTET STRING (tag 0x04) wrapping the given bytes.
fn der_octet_string(bytes: &[u8]) -> Vec<u8> {
    der_wrap(0x04, bytes)
}
744
745fn der_oid(oid: &[u32]) -> Vec<u8> {
746    let mut out = Vec::new();
747    if oid.len() < 2 {
748        return der_wrap(0x06, &out);
749    }
750    out.push((oid[0] * 40 + oid[1]) as u8);
751    for &arc in &oid[2..] {
752        let mut stack = [0u8; 5];
753        let mut idx = stack.len();
754        let mut n = arc;
755        stack[idx - 1] = (n & 0x7f) as u8;
756        idx -= 1;
757        n >>= 7;
758        while n > 0 {
759            stack[idx - 1] = 0x80 | ((n & 0x7f) as u8);
760            idx -= 1;
761            n >>= 7;
762        }
763        out.extend(&stack[idx..]);
764    }
765    der_wrap(0x06, &out)
766}
767
768#[cfg(test)]
769mod tests {
770    use super::*;
771    use base64::{Engine as _, engine::general_purpose::STANDARD};
772    use std::io::Read;
773    use tempfile::tempdir;
774    
775    #[test]
776    fn test_daily_publication_and_signature_chain() {
777        let mut service = PublicationService::new();
778        let hourly_roots = vec!["a".repeat(64), "b".repeat(64)];
779
780        let mut day1 = service.create_daily_publication(&hourly_roots, 42);
781        assert_eq!(day1.entry_count, 42);
782        assert_eq!(day1.hourly_roots.len(), 2);
783        assert_eq!(day1.previous_day_root, "0".repeat(64));
784        assert!(day1.signature.is_none());
785
786        service.sign_publication(&mut day1, b"sig");
787        let sig = day1.signature.as_ref().expect("signature set");
788        assert_eq!(sig.algorithm, "RSA-PSS-SHA256");
789        assert_eq!(sig.value, STANDARD.encode(b"sig"));
790
791        let day2 = service.create_daily_publication(&hourly_roots, 1);
792        assert_eq!(day2.previous_day_root, day1.root_hash);
793    }
794
795    #[test]
796    fn test_add_tsa_timestamp_mock_only() {
797        let mut service = PublicationService::new();
798        let hourly_roots = vec!["c".repeat(64)];
799        let mut publication = service.create_daily_publication(&hourly_roots, 1);
800
801        let rt = tokio::runtime::Builder::new_current_thread()
802            .build()
803            .expect("runtime");
804
805        rt.block_on(async {
806            service
807                .add_tsa_timestamp(&mut publication, "mock://tsa")
808                .await
809                .expect("mock TSA works");
810        });
811
812        let tsa = publication.tsa_timestamp.as_ref().expect("tsa timestamp set");
813        assert_eq!(tsa.tsa_url, "mock://tsa");
814        assert!(tsa.token.starts_with("mock-sha256="));
815    }
816
817    #[test]
818    fn test_add_tsa_timestamp_rejects_non_mock() {
819        let mut service = PublicationService::new();
820        let hourly_roots = vec!["d".repeat(64)];
821        let mut publication = service.create_daily_publication(&hourly_roots, 1);
822
823        let rt = tokio::runtime::Builder::new_current_thread()
824            .build()
825            .expect("runtime");
826
827        let err = rt.block_on(async {
828            service
829                .add_tsa_timestamp(&mut publication, "https://tsa.example")
830                .await
831                .expect_err("network call should fail for placeholder endpoint")
832        });
833
834        match err {
835            TsaError::Server(_) | TsaError::Network(_) => {}
836            other => panic!("unexpected error: {other}"),
837        }
838        assert!(publication.tsa_timestamp.is_none());
839    }
840
841    #[test]
842    fn test_build_rfc3161_query_der_contains_sha256_oid() {
843        let digest = [0x11u8; 32];
844        let req = build_rfc3161_timestamp_query(&digest, "nonce").expect("query");
845        // sha256 OID bytes: 06 09 60 86 48 01 65 03 04 02 01
846        let oid = [0x06, 0x09, 0x60, 0x86, 0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x01];
847        assert!(req.windows(oid.len()).any(|w| w == oid));
848    }
849
850    #[test]
851    fn test_parse_timestamp_response_status_only() {
852        // TimeStampResp ::= SEQUENCE { status PKIStatusInfo }
853        let resp = [0x30, 0x05, 0x30, 0x03, 0x02, 0x01, 0x00];
854        let parsed = parse_timestamp_response(&resp).expect("parse");
855        assert_eq!(parsed.status, 0);
856        assert!(parsed.time_stamp_token_der.is_none());
857    }
858
859    #[test]
860    fn test_extract_generalized_time_best_effort() {
861        // DER GeneralizedTime: "20260226083045Z"
862        let mut bytes = vec![0x18, 0x0f];
863        bytes.extend_from_slice(b"20260226083045Z");
864        let ts = extract_generalized_time_rfc3339(&bytes).expect("timestamp");
865        assert!(ts.starts_with("2026-02-26T08:30:45"));
866    }
867
868    #[test]
869    fn test_canonical_json_export_is_deterministic() {
870        let service = PublicationService::new();
871        let publication = service.create_daily_publication(&["e".repeat(64)], 7);
872
873        let json1 = publication.to_canonical_json().expect("json1");
874        let json2 = publication.to_canonical_json().expect("json2");
875
876        assert_eq!(json1, json2);
877        assert!(!json1.contains('\n'));
878        assert!(json1.contains("\"entry_count\":7"));
879        assert!(json1.contains("\"hourly_roots\""));
880    }
881
882    #[test]
883    fn test_canonical_json_gzip_roundtrip() {
884        let service = PublicationService::new();
885        let publication = service.create_daily_publication(&["f".repeat(64)], 3);
886
887        let original = publication.to_canonical_json_bytes().expect("original");
888        let compressed = publication.to_canonical_json_gzip().expect("gzip");
889        assert!(!compressed.is_empty());
890
891        let mut decoder = flate2::read::GzDecoder::new(compressed.as_slice());
892        let mut decompressed = Vec::new();
893        decoder.read_to_end(&mut decompressed).expect("decompress");
894
895        assert_eq!(decompressed, original);
896    }
897
898    #[test]
899    fn test_publication_basename_is_stable() {
900        let service = PublicationService::new();
901        let publication = service.create_daily_publication(&["bb".repeat(32)], 1);
902        let base = publication.publication_basename();
903
904        assert!(base.starts_with("daily-publication-"));
905        assert!(base.contains(&publication.date));
906        assert!(base.ends_with(&publication.root_hash[..16]));
907    }
908
909    #[test]
910    fn test_verify_root_hash_detects_tamper() {
911        let service = PublicationService::new();
912        let mut publication = service.create_daily_publication(&["aa".repeat(32), "bb".repeat(32)], 2);
913        assert!(publication.verify_root_hash());
914
915        publication.hourly_roots.push("cc".repeat(32));
916        assert!(!publication.verify_root_hash());
917    }
918
919    #[test]
920    fn test_tsa_token_inspection() {
921        let tsa = TsaTimestamp {
922            tsa_url: "https://tsa.example".to_string(),
923            timestamp: "2026-02-26T00:00:00Z".to_string(),
924            token: base64_encode(&[0x18, 0x0f, b'2', b'0', b'2', b'6', b'0', b'2', b'2', b'6', b'0', b'8', b'3', b'0', b'4', b'5', b'Z']),
925        };
926        let inspected = tsa.inspect_token();
927        assert!(inspected.token_present);
928        assert!(inspected.token_base64_valid);
929        assert!(inspected.token_der_nonempty);
930        assert!(inspected.extracted_timestamp.is_some());
931
932        let bad = TsaTimestamp {
933            tsa_url: "https://tsa.example".to_string(),
934            timestamp: "2026-02-26T00:00:00Z".to_string(),
935            token: "%%%".to_string(),
936        };
937        let bad_inspected = bad.inspect_token();
938        assert!(bad_inspected.token_present);
939        assert!(!bad_inspected.token_base64_valid);
940    }
941
942    #[cfg(feature = "tsa-cms-openssl")]
943    #[test]
944    fn test_tsa_cms_verify_rejects_invalid_base64() {
945        let tsa = TsaTimestamp {
946            tsa_url: "https://tsa.example".to_string(),
947            timestamp: "2026-02-26T00:00:00Z".to_string(),
948            token: "%%%".to_string(),
949        };
950
951        let err = tsa
952            .verify_cms_signature_with_pem_roots(b"")
953            .expect_err("invalid base64 must fail");
954        match err {
955            TsaCmsVerifyError::TokenBase64(_) => {}
956            other => panic!("unexpected error: {other}"),
957        }
958    }
959
960    #[cfg(feature = "tsa-cms-openssl")]
961    #[test]
962    fn test_tsa_cms_verify_rejects_non_pkcs7_der() {
963        let tsa = TsaTimestamp {
964            tsa_url: "https://tsa.example".to_string(),
965            timestamp: "2026-02-26T00:00:00Z".to_string(),
966            token: base64_encode(&[0x30, 0x03, 0x02, 0x01, 0x00]),
967        };
968
969        let err = tsa
970            .verify_cms_signature_with_pem_roots(b"")
971            .expect_err("non-pkcs7 der must fail");
972        match err {
973            TsaCmsVerifyError::Pkcs7Parse(_) | TsaCmsVerifyError::TrustStore(_) => {}
974            other => panic!("unexpected error: {other}"),
975        }
976    }
977
978    #[cfg(not(feature = "tsa-cms-openssl"))]
979    #[test]
980    fn test_tsa_cms_verify_reports_backend_unavailable_without_feature() {
981        let tsa = TsaTimestamp {
982            tsa_url: "https://tsa.example".to_string(),
983            timestamp: "2026-02-26T00:00:00Z".to_string(),
984            token: "%%%".to_string(),
985        };
986
987        let err = tsa
988            .verify_cms_signature_with_pem_roots(b"")
989            .expect_err("backend should be unavailable without feature");
990        match err {
991            TsaCmsVerifyError::BackendUnavailable(_) => {}
992            other => panic!("unexpected error: {other}"),
993        }
994    }
995
996    #[test]
997    fn test_publish_to_filesystem_writes_json_and_gzip() {
998        let tmp = tempdir().expect("tempdir");
999        let service = PublicationService::new();
1000        let publication = service.create_daily_publication(&["aa".repeat(32)], 11);
1001
1002        let written = service
1003            .publish_to_filesystem(&publication, tmp.path(), true)
1004            .expect("publish");
1005
1006        assert!(written.json_path.exists());
1007        let gzip_path = written.gzip_path.as_ref().expect("gzip path");
1008        assert!(gzip_path.exists());
1009
1010        let json_bytes = std::fs::read(&written.json_path).expect("json bytes");
1011        assert_eq!(
1012            json_bytes,
1013            publication.to_canonical_json_bytes().expect("canonical json")
1014        );
1015
1016        let gz_bytes = std::fs::read(gzip_path).expect("gzip bytes");
1017        let mut decoder = flate2::read::GzDecoder::new(gz_bytes.as_slice());
1018        let mut out = Vec::new();
1019        decoder.read_to_end(&mut out).expect("decompress");
1020        assert_eq!(out, json_bytes);
1021    }
1022}