1use std::collections::{HashMap, HashSet};
2use std::convert::TryInto;
3use std::fs::File;
4use std::io::{Read, Seek};
5use std::path::Path;
6
7use anyhow::{Context, Result, anyhow, bail};
8use base64::Engine;
9use base64::engine::general_purpose::URL_SAFE_NO_PAD;
10use ed25519_dalek::{Signature, Verifier, VerifyingKey};
11use greentic_types::ComponentManifest;
12use greentic_types::decode_pack_manifest;
13use greentic_types::pack::extensions::component_manifests::{
14 ComponentManifestIndexV1, EXT_COMPONENT_MANIFEST_INDEX_V1, ManifestEncoding,
15};
16use greentic_types::pack_manifest::{ExtensionInline, PackManifest as GpackManifest};
17use serde::Deserialize;
18use serde_cbor;
19use serde_json;
20use sha2::{Digest, Sha256};
21use x509_parser::pem::parse_x509_pem;
22use x509_parser::prelude::*;
23use zip::ZipArchive;
24
25use crate::builder::{
26 ComponentEntry, FlowEntry, ImportRef, PackManifest, PackMeta, SBOM_FORMAT,
27 SIGNATURE_CHAIN_PATH, SIGNATURE_PATH, SbomEntry, SignatureEnvelope, hex_hash,
28 signature_digest_from_entries,
29};
30
// Decompressed-size caps enforced while reading the archive. Test builds use
// deliberately small limits so the oversized-entry/archive tests can build
// their fixtures cheaply; non-test builds allow realistic pack sizes.
#[cfg(test)]
const MAX_ARCHIVE_BYTES: u64 = 256 * 1024;
#[cfg(not(test))]
const MAX_ARCHIVE_BYTES: u64 = 64 * 1024 * 1024;

// Per-entry cap, checked before an entry is read into memory.
#[cfg(test)]
const MAX_FILE_BYTES: u64 = 64 * 1024;
#[cfg(not(test))]
const MAX_FILE_BYTES: u64 = 16 * 1024 * 1024;
40
/// Render a byte count as whole mebibytes, e.g. `67108864` -> `"64 MB"`.
/// Integer division: fractional megabytes are truncated.
fn fmt_mb(bytes: u64) -> String {
    let mb = bytes / (1024 * 1024);
    format!("{mb} MB")
}
44
/// How strictly signatures are enforced when opening a pack.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SigningPolicy {
    /// Tolerate missing or dev-signed packs; problems become warnings.
    DevOk,
    /// Require complete, valid signature material; dev certs are rejected.
    Strict,
}
50
/// Outcome of verifying a pack: signature/SBOM status plus any non-fatal
/// warnings collected along the way.
#[derive(Debug, Clone, Default)]
pub struct VerifyReport {
    /// True only when the signature was present and verified successfully.
    pub signature_ok: bool,
    /// True when the SBOM parsed and all hashes/coverage checks passed.
    pub sbom_ok: bool,
    /// Human-readable notes about skipped or degraded checks.
    pub warnings: Vec<String>,
}
57
/// A fully-read pack: decoded manifest, verification report, SBOM entries,
/// and the raw bytes of every archive file keyed by normalized path.
#[derive(Debug, Clone)]
pub struct PackLoad {
    pub manifest: PackManifest,
    pub report: VerifyReport,
    pub sbom: Vec<SbomEntry>,
    /// Normalized entry path -> file contents.
    pub files: HashMap<String, Vec<u8>>,
    /// Present when manifest.cbor also decoded as a gpack manifest.
    pub gpack_manifest: Option<GpackManifest>,
}
66
/// Error wrapper returned by [`open_pack`]; carries only the rendered
/// failure message.
#[derive(Debug, Clone)]
pub struct PackVerifyResult {
    pub message: String,
}

impl PackVerifyResult {
    /// Flatten an `anyhow` error into its display string.
    fn from_error(err: anyhow::Error) -> Self {
        Self {
            message: err.to_string(),
        }
    }
}
79
/// Decoded state of the component-manifest-index pack extension.
#[derive(Debug, Clone)]
pub struct ComponentManifestIndexState {
    /// Whether the extension key exists in the gpack manifest at all.
    pub present: bool,
    /// The decoded index, when decoding succeeded.
    pub index: Option<ComponentManifestIndexV1>,
    /// Decode/shape error, when the extension was present but unusable.
    pub error: Option<String>,
}

impl ComponentManifestIndexState {
    /// An absent extension counts as ok; a present one must have no error.
    pub fn ok(&self) -> bool {
        !self.present || self.error.is_none()
    }
}
92
/// Per-component result of checking an external manifest file against the
/// index entry and (when available) the inline manifest.
#[derive(Debug, Clone)]
pub struct ComponentManifestFileStatus {
    pub component_id: String,
    pub manifest_file: String,
    pub encoding: ManifestEncoding,
    /// `sha256:<hex>` expected hash from the index entry, if declared.
    pub content_hash: Option<String>,
    pub file_present: bool,
    /// None when the index declared no content hash to check.
    pub hash_ok: Option<bool>,
    pub decoded: bool,
    /// None when there is no inline manifest to compare against.
    pub inline_match: Option<bool>,
    pub error: Option<String>,
}

impl ComponentManifestFileStatus {
    /// Every performed check passed; unperformed checks (None) don't fail.
    pub fn is_ok(&self) -> bool {
        self.error.is_none()
            && self.file_present
            && self.decoded
            && self.hash_ok.unwrap_or(true)
            && self.inline_match.unwrap_or(true)
    }
}
115
/// Aggregate result of verifying all external component manifest files
/// listed by the component-manifest-index extension.
#[derive(Debug, Clone)]
pub struct ManifestFileVerificationReport {
    pub extension_present: bool,
    pub extension_error: Option<String>,
    pub entries: Vec<ComponentManifestFileStatus>,
}

impl ManifestFileVerificationReport {
    /// Ok when the extension is absent, or present with no errors anywhere.
    pub fn ok(&self) -> bool {
        if !self.extension_present {
            return true;
        }
        self.extension_error.is_none()
            && self.entries.iter().all(ComponentManifestFileStatus::is_ok)
    }

    /// Extension-level error first, otherwise the first per-entry error.
    pub fn first_error(&self) -> Option<String> {
        if let Some(err) = &self.extension_error {
            return Some(err.clone());
        }
        self.entries.iter().find_map(|status| status.error.clone())
    }
}
139
140pub fn open_pack(path: &Path, policy: SigningPolicy) -> Result<PackLoad, PackVerifyResult> {
141 match open_pack_inner(path, policy) {
142 Ok(result) => Ok(result),
143 Err(err) => Err(PackVerifyResult::from_error(err)),
144 }
145}
146
impl PackLoad {
    /// Decode the component-manifest-index extension from the gpack
    /// manifest, if any. Absence (no gpack manifest, or no extension key)
    /// yields `present: false` with no error; a present-but-malformed
    /// extension yields `present: true` plus an `error` string.
    pub fn component_manifest_index_v1(&self) -> ComponentManifestIndexState {
        let mut state = ComponentManifestIndexState {
            present: false,
            index: None,
            error: None,
        };

        let manifest = match self.gpack_manifest.as_ref() {
            Some(manifest) => manifest,
            None => return state,
        };

        let Some(extension) = manifest
            .extensions
            .as_ref()
            .and_then(|map| map.get(EXT_COMPONENT_MANIFEST_INDEX_V1))
        else {
            return state;
        };
        state.present = true;

        // From here on the extension exists, so every failure is reported.
        let inline = match extension.inline.as_ref() {
            Some(inline) => inline,
            None => {
                state.error = Some("component manifest index missing inline payload".into());
                return state;
            }
        };

        // Only the `Other` (raw value) inline shape is accepted.
        let payload = match inline {
            ExtensionInline::Other(value) => value,
            _ => {
                state.error =
                    Some("component manifest index inline payload has unexpected shape".into());
                return state;
            }
        };

        match ComponentManifestIndexV1::from_extension_value(payload) {
            Ok(index) => state.index = Some(index),
            Err(err) => state.error = Some(err.to_string()),
        }

        state
    }

    /// Look up a component manifest, preferring the external CBOR file named
    /// by the index (hash- and id-checked) and falling back to the inline
    /// manifest from the gpack manifest. Returns `Ok(None)` when the
    /// component is unknown in both places.
    ///
    /// # Errors
    /// Fails when the index extension is malformed, the encoding is not
    /// CBOR, the file's hash or decoded id mismatches, or decoding fails.
    pub fn get_component_manifest_prefer_file(
        &self,
        component_id: &str,
    ) -> Result<Option<ComponentManifest>> {
        let state = self.component_manifest_index_v1();
        if let Some(err) = state.error {
            return Err(anyhow!(err));
        }

        if let Some(entry) = state.index.as_ref().and_then(|index| {
            index
                .entries
                .iter()
                .find(|entry| entry.component_id == component_id)
        }) {
            if entry.encoding != ManifestEncoding::Cbor {
                bail!("unsupported manifest encoding {:?}", entry.encoding);
            }

            // If the referenced file is absent we silently fall through to
            // the inline manifest below.
            if let Some(bytes) = self.files.get(&entry.manifest_file) {
                if let Some(expected) = entry.content_hash.as_deref() {
                    let actual = sha256_prefixed(bytes);
                    if !expected.eq_ignore_ascii_case(&actual) {
                        bail!(
                            "manifest hash mismatch for {}: expected {}, got {}",
                            entry.manifest_file,
                            expected,
                            actual
                        );
                    }
                }

                let decoded: ComponentManifest =
                    serde_cbor::from_slice(bytes).context("decode component manifest")?;
                if decoded.id.to_string() != entry.component_id {
                    bail!(
                        "manifest id {} does not match index component_id {}",
                        decoded.id,
                        entry.component_id
                    );
                }
                return Ok(Some(decoded));
            }
        }

        // Fallback: inline component entry from the gpack manifest.
        if let Some(component) = self.gpack_manifest.as_ref().and_then(|manifest| {
            manifest
                .components
                .iter()
                .find(|c| c.id.to_string() == component_id)
        }) {
            return Ok(Some(component.clone()));
        }

        Ok(None)
    }

    /// Verify every external manifest file listed by the index extension:
    /// presence, declared encoding, content hash, decodability, id match,
    /// and (when inline manifests exist) equality with the inline copy.
    /// Never fails; all problems are recorded per-entry in the report.
    pub fn verify_component_manifest_files(&self) -> ManifestFileVerificationReport {
        let mut report = ManifestFileVerificationReport {
            extension_present: false,
            extension_error: None,
            entries: Vec::new(),
        };

        let state = self.component_manifest_index_v1();
        if !state.present {
            return report;
        }
        report.extension_present = true;

        let Some(index) = state.index else {
            report.extension_error = state.error;
            return report;
        };

        let inline_components = self
            .gpack_manifest
            .as_ref()
            .map(|manifest| &manifest.components);

        for entry in index.entries {
            let mut status = ComponentManifestFileStatus {
                component_id: entry.component_id.clone(),
                manifest_file: entry.manifest_file.clone(),
                encoding: entry.encoding.clone(),
                content_hash: entry.content_hash.clone(),
                file_present: false,
                hash_ok: None,
                decoded: false,
                inline_match: None,
                error: None,
            };

            if entry.encoding != ManifestEncoding::Cbor {
                status.error = Some("unsupported manifest encoding (expected cbor)".into());
                report.entries.push(status);
                continue;
            }

            let Some(bytes) = self.files.get(&entry.manifest_file) else {
                status.error = Some("manifest file missing from archive".into());
                report.entries.push(status);
                continue;
            };
            status.file_present = true;

            if let Some(expected) = entry.content_hash.as_deref() {
                if !expected.starts_with("sha256:") {
                    status.hash_ok = Some(false);
                    status.error = Some("content_hash must use sha256:<hex>".into());
                    report.entries.push(status);
                    continue;
                }
                let actual = sha256_prefixed(bytes);
                let matches = expected.eq_ignore_ascii_case(&actual);
                status.hash_ok = Some(matches);
                if !matches {
                    status.error = Some(format!(
                        "manifest hash mismatch: expected {}, got {}",
                        expected, actual
                    ));
                }
            }

            // Decoding proceeds even after a hash mismatch; only the first
            // error per entry is kept (get_or_insert_with below).
            match serde_cbor::from_slice::<ComponentManifest>(bytes) {
                Ok(decoded) => {
                    status.decoded = true;
                    if decoded.id.to_string() != entry.component_id {
                        status.error.get_or_insert_with(|| {
                            format!(
                                "component id mismatch: index has {}, manifest has {}",
                                entry.component_id, decoded.id
                            )
                        });
                    }

                    if let Some(inline_components) = inline_components {
                        if let Some(inline) = inline_components.iter().find(|c| c.id == decoded.id)
                        {
                            let matches = inline == &decoded;
                            status.inline_match = Some(matches);
                            if !matches {
                                status.error.get_or_insert_with(|| {
                                    "external manifest differs from inline manifest".into()
                                });
                            }
                        } else {
                            status.inline_match = Some(false);
                            status.error.get_or_insert_with(|| {
                                "component missing from inline manifest".into()
                            });
                        }
                    }
                }
                Err(err) => {
                    status
                        .error
                        .get_or_insert_with(|| format!("failed to decode manifest: {err}"));
                }
            }

            report.entries.push(status);
        }

        report
    }
}
361
/// Core pack-opening logic: read all archive entries (with size limits),
/// decode the manifest in either supported format, then verify SBOM and
/// signature according to `policy`.
fn open_pack_inner(path: &Path, policy: SigningPolicy) -> Result<PackLoad> {
    let mut archive = ZipArchive::new(
        File::open(path).with_context(|| format!("failed to open {}", path.display()))?,
    )
    .with_context(|| format!("{} is not a valid gtpack archive", path.display()))?;

    let (files, total) = read_archive_entries(&mut archive)?;
    if total > MAX_ARCHIVE_BYTES {
        bail!(
            "gtpack archive exceeds maximum allowed size ({}) — \
             check that no build artifacts (e.g. .gtpack files, dist/ contents) \
             were accidentally included",
            fmt_mb(MAX_ARCHIVE_BYTES)
        );
    }

    let manifest_bytes = files
        .get("manifest.cbor")
        .cloned()
        .ok_or_else(|| anyhow!("manifest.cbor missing from archive"))?;
    // Best-effort gpack decode kept alongside whichever model wins below.
    let decoded_gpack_manifest = decode_pack_manifest(&manifest_bytes).ok();
    match decode_manifest(&manifest_bytes).context("manifest.cbor is invalid")? {
        // Native PackManifest path: SBOM is mandatory and strictly checked.
        ManifestModel::Pack(manifest) => {
            let manifest = *manifest;
            let (sbom_doc, sbom_bytes, sbom_name) = read_sbom_required(&files)?;
            if sbom_doc.format != SBOM_FORMAT {
                bail!("unexpected SBOM format: {}", sbom_doc.format);
            }

            let mut warnings = Vec::new();
            verify_sbom(&files, &sbom_doc.files, sbom_name)?;
            // (sig, chain, sbom) presence matrix decides how signatures are
            // handled; Strict turns every degraded case into a hard error.
            let signature_ok = match (
                files.get(SIGNATURE_PATH),
                files.get(SIGNATURE_CHAIN_PATH),
                Some(&sbom_bytes),
            ) {
                (Some(_), Some(_), Some(sbom_bytes)) => match verify_signature(
                    &files,
                    &manifest_bytes,
                    sbom_bytes,
                    &sbom_doc.files,
                    policy,
                    &mut warnings,
                ) {
                    Ok(()) => true,
                    Err(err) => {
                        if matches!(policy, SigningPolicy::Strict) {
                            return Err(err);
                        }
                        warnings.push(format!("signature verification failed: {err}"));
                        false
                    }
                },
                (None, None, _) => match policy {
                    SigningPolicy::Strict => {
                        bail!("signature file `{}` missing", SIGNATURE_PATH)
                    }
                    SigningPolicy::DevOk => {
                        warnings.push("signature files missing; skipping verification".into());
                        false
                    }
                },
                // Exactly one of sig/chain present.
                _ => {
                    match policy {
                        SigningPolicy::Strict => bail!("signature files incomplete; missing chain"),
                        SigningPolicy::DevOk => warnings
                            .push("signature files incomplete; skipping verification".into()),
                    }
                    false
                }
            };

            Ok(PackLoad {
                manifest,
                report: VerifyReport {
                    signature_ok,
                    sbom_ok: true,
                    warnings,
                },
                sbom: sbom_doc.files,
                files,
                gpack_manifest: decoded_gpack_manifest,
            })
        }
        // Gpack compatibility path: SBOM/signature problems degrade to
        // warnings regardless of policy.
        ManifestModel::Gpack(manifest) => {
            let manifest = *manifest;
            let mut warnings = Vec::new();
            if manifest.schema_version != "pack-v1" {
                warnings.push(format!(
                    "detected manifest schema {}; applying compatibility reader",
                    manifest.schema_version
                ));
            }

            let (sbom, sbom_ok, sbom_bytes, sbom_name) = read_sbom_optional(&files, &mut warnings);

            let signature_ok = match (
                files.get(SIGNATURE_PATH),
                files.get(SIGNATURE_CHAIN_PATH),
                sbom_bytes.as_deref(),
                sbom_ok,
            ) {
                (Some(_), Some(_), Some(sbom_bytes), true) => {
                    match verify_signature(
                        &files,
                        &manifest_bytes,
                        sbom_bytes,
                        &sbom,
                        policy,
                        &mut warnings,
                    ) {
                        Ok(()) => true,
                        Err(err) => {
                            warnings.push(format!("signature verification failed: {err}"));
                            false
                        }
                    }
                }
                (Some(_), Some(_), Some(_), false) => {
                    warnings.push(
                        "signature present but sbom validation failed; skipping verification"
                            .into(),
                    );
                    false
                }
                (Some(_), Some(_), None, _) => {
                    warnings.push(format!(
                        "signature present but {} missing; skipping verification",
                        sbom_name
                    ));
                    false
                }
                (None, None, _, _) => {
                    warnings.push("signature files missing; skipping verification".into());
                    false
                }
                _ => {
                    warnings.push("signature files incomplete; skipping verification".into());
                    false
                }
            };

            Ok(PackLoad {
                manifest: convert_gpack_manifest(&manifest, &files),
                report: VerifyReport {
                    signature_ok,
                    sbom_ok,
                    warnings,
                },
                sbom,
                files,
                gpack_manifest: Some(manifest),
            })
        }
    }
}
518
/// On-disk SBOM shape (`sbom.cbor` / `sbom.json`): a format tag plus the
/// per-file inventory entries.
#[derive(Deserialize)]
struct SbomDocument {
    format: String,
    files: Vec<SbomEntry>,
}
524
525fn verify_sbom(
526 files: &HashMap<String, Vec<u8>>,
527 entries: &[SbomEntry],
528 sbom_name: &str,
529) -> Result<()> {
530 let mut listed = HashSet::new();
531 for entry in entries {
532 let data = files
533 .get(&entry.path)
534 .ok_or_else(|| anyhow!("sbom references missing file `{}`", entry.path))?;
535 let actual = hex_hash(data);
536 if !actual.eq_ignore_ascii_case(&entry.hash_blake3) {
537 bail!(
538 "hash mismatch for {}: expected {}, found {}",
539 entry.path,
540 entry.hash_blake3,
541 actual
542 );
543 }
544 listed.insert(entry.path.clone());
545 }
546
547 for path in files.keys() {
548 if path == SIGNATURE_PATH
549 || path == SIGNATURE_CHAIN_PATH
550 || path == "sbom.json"
551 || path == "sbom.cbor"
552 {
553 continue;
554 }
555 if !listed.contains(path) {
556 bail!("file `{}` missing from {}", path, sbom_name);
557 }
558 }
559
560 Ok(())
561}
562
563fn read_sbom_required(
564 files: &HashMap<String, Vec<u8>>,
565) -> Result<(SbomDocument, Vec<u8>, &'static str)> {
566 if let Some(sbom_bytes) = files.get("sbom.cbor") {
567 let sbom_doc: SbomDocument =
568 serde_cbor::from_slice(sbom_bytes).context("sbom.cbor is not valid CBOR")?;
569 return Ok((sbom_doc, sbom_bytes.clone(), "sbom.cbor"));
570 }
571 if let Some(sbom_bytes) = files.get("sbom.json") {
572 let sbom_doc: SbomDocument =
573 serde_json::from_slice(sbom_bytes).context("sbom.json is not valid JSON")?;
574 return Ok((sbom_doc, sbom_bytes.clone(), "sbom.json"));
575 }
576 Err(anyhow!("sbom.cbor missing from archive"))
577}
578
579fn read_sbom_optional(
580 files: &HashMap<String, Vec<u8>>,
581 warnings: &mut Vec<String>,
582) -> (Vec<SbomEntry>, bool, Option<Vec<u8>>, &'static str) {
583 if let Some(sbom_bytes) = files.get("sbom.cbor") {
584 match serde_cbor::from_slice::<SbomDocument>(sbom_bytes) {
585 Ok(sbom_doc) => {
586 let mut ok = sbom_doc.format == SBOM_FORMAT;
587 if !ok {
588 warnings.push(format!("unexpected SBOM format: {}", sbom_doc.format));
589 }
590 match verify_sbom(files, &sbom_doc.files, "sbom.cbor") {
591 Ok(()) => {}
592 Err(err) => {
593 warnings.push(err.to_string());
594 ok = false;
595 }
596 }
597 return (sbom_doc.files, ok, Some(sbom_bytes.clone()), "sbom.cbor");
598 }
599 Err(err) => {
600 warnings.push(format!("sbom.cbor is not valid CBOR: {err}"));
601 return (Vec::new(), false, Some(sbom_bytes.clone()), "sbom.cbor");
602 }
603 }
604 }
605 if let Some(sbom_bytes) = files.get("sbom.json") {
606 match serde_json::from_slice::<SbomDocument>(sbom_bytes) {
607 Ok(sbom_doc) => {
608 let mut ok = sbom_doc.format == SBOM_FORMAT;
609 if !ok {
610 warnings.push(format!("unexpected SBOM format: {}", sbom_doc.format));
611 }
612 match verify_sbom(files, &sbom_doc.files, "sbom.json") {
613 Ok(()) => {}
614 Err(err) => {
615 warnings.push(err.to_string());
616 ok = false;
617 }
618 }
619 return (sbom_doc.files, ok, Some(sbom_bytes.clone()), "sbom.json");
620 }
621 Err(err) => {
622 warnings.push(format!("sbom.json is not valid JSON: {err}"));
623 return (Vec::new(), false, Some(sbom_bytes.clone()), "sbom.json");
624 }
625 }
626 }
627 warnings.push("sbom.cbor missing; synthesized inventory for validation".into());
628 (synthesize_sbom(files), false, None, "sbom.cbor")
629}
630
631fn verify_signature(
632 files: &HashMap<String, Vec<u8>>,
633 manifest_bytes: &[u8],
634 sbom_bytes: &[u8],
635 entries: &[SbomEntry],
636 policy: SigningPolicy,
637 warnings: &mut Vec<String>,
638) -> Result<()> {
639 let signature_bytes = files
640 .get(SIGNATURE_PATH)
641 .ok_or_else(|| anyhow!("signature file `{}` missing", SIGNATURE_PATH))?;
642 let chain_bytes = files
643 .get(SIGNATURE_CHAIN_PATH)
644 .ok_or_else(|| anyhow!("certificate chain `{}` missing", SIGNATURE_CHAIN_PATH))?;
645
646 let envelope: SignatureEnvelope =
647 serde_json::from_slice(signature_bytes).context("signatures/pack.sig is not valid JSON")?;
648 let digest = signature_digest_from_entries(entries, manifest_bytes, sbom_bytes);
649 let digest_hex = digest.to_hex().to_string();
650 if !digest_hex.eq_ignore_ascii_case(&envelope.digest) {
651 bail!("signature digest mismatch");
652 }
653
654 match envelope.alg.to_ascii_lowercase().as_str() {
655 "ed25519" => verify_ed25519_signature(&envelope, digest, chain_bytes, policy, warnings)?,
656 other => bail!("unsupported signature algorithm: {}", other),
657 }
658
659 Ok(())
660}
661
662fn verify_ed25519_signature(
663 envelope: &SignatureEnvelope,
664 digest: blake3::Hash,
665 chain_bytes: &[u8],
666 policy: SigningPolicy,
667 warnings: &mut Vec<String>,
668) -> Result<()> {
669 let sig_raw = URL_SAFE_NO_PAD
670 .decode(envelope.sig.as_bytes())
671 .map_err(|err| anyhow!("invalid signature encoding: {err}"))?;
672 let sig_array: [u8; 64] = sig_raw
673 .as_slice()
674 .try_into()
675 .map_err(|_| anyhow!("signature must be 64 bytes"))?;
676 let signature = Signature::from_bytes(&sig_array);
677
678 let cert_der = parse_certificate_chain(chain_bytes)?;
679 enforce_policy(&cert_der, policy, warnings)?;
680 let first_cert = parse_certificate(&cert_der[0])?;
681 let verifying_key = extract_ed25519_key(&first_cert)?;
682 verifying_key
683 .verify(digest.as_bytes(), &signature)
684 .map_err(|err| anyhow!("signature verification failed: {err}"))?;
685 Ok(())
686}
687
688fn extract_ed25519_key(cert: &X509Certificate<'_>) -> Result<VerifyingKey> {
689 let spki = cert.public_key();
690 let key_bytes: &[u8] = spki.subject_public_key.data.as_ref();
691 if key_bytes.len() != 32 {
692 bail!(
693 "expected 32-byte Ed25519 public key, found {} bytes",
694 key_bytes.len()
695 );
696 }
697 let mut raw = [0u8; 32];
698 raw.copy_from_slice(key_bytes);
699 VerifyingKey::from_bytes(&raw).map_err(|err| anyhow!("invalid ed25519 key: {err}"))
700}
701
702fn parse_certificate(bytes: &[u8]) -> Result<X509Certificate<'_>> {
703 let (_, cert) =
704 X509Certificate::from_der(bytes).map_err(|err| anyhow!("invalid certificate: {err}"))?;
705 Ok(cert)
706}
707
708fn parse_certificate_chain(mut data: &[u8]) -> Result<Vec<Vec<u8>>> {
709 let mut certs = Vec::new();
710 loop {
711 data = trim_leading(data);
712 if data.is_empty() {
713 break;
714 }
715 let (rest, pem) = parse_x509_pem(data).map_err(|err| anyhow!("invalid PEM: {err}"))?;
716 if pem.label != "CERTIFICATE" {
717 bail!("unexpected PEM label {}; expected CERTIFICATE", pem.label);
718 }
719 certs.push(pem.contents.to_vec());
720 data = rest;
721 }
722
723 if certs.is_empty() {
724 bail!("certificate chain is empty");
725 }
726
727 Ok(certs)
728}
729
730fn enforce_policy(
731 certs: &[Vec<u8>],
732 policy: SigningPolicy,
733 warnings: &mut Vec<String>,
734) -> Result<()> {
735 let first = certs
736 .first()
737 .ok_or_else(|| anyhow!("certificate chain is empty"))?;
738 let first_cert = parse_certificate(first)?;
739 let is_dev = is_dev_certificate(&first_cert);
740
741 match policy {
742 SigningPolicy::DevOk => {
743 if certs.len() != 1 {
744 warnings.push(format!(
745 "chain contains {} certificates; dev mode expects exactly 1",
746 certs.len()
747 ));
748 }
749 }
750 SigningPolicy::Strict => {
751 if is_dev {
752 bail!("dev self-signed certificate is not allowed under strict policy");
753 }
754 }
755 }
756
757 Ok(())
758}
759
760fn is_dev_certificate(cert: &X509Certificate<'_>) -> bool {
761 let cn_matches = cert
762 .subject()
763 .iter_common_name()
764 .flat_map(|attr| attr.as_str())
765 .any(|cn| cn == "greentic-dev-local");
766 cn_matches && (cert.subject() == cert.issuer())
767}
768
/// Return `data` with any leading ASCII whitespace bytes stripped.
fn trim_leading(data: &[u8]) -> &[u8] {
    let start = data
        .iter()
        .take_while(|byte| byte.is_ascii_whitespace())
        .count();
    &data[start..]
}
779
/// Read every archive entry into memory, enforcing safety constraints:
/// regular files only (no symlinks/specials), safe normalized paths, no
/// duplicates, and a per-entry size cap. Returns the file map plus the
/// summed declared sizes (the archive-level cap is checked by the caller).
fn read_archive_entries<R: Read + Seek>(
    archive: &mut ZipArchive<R>,
) -> Result<(HashMap<String, Vec<u8>>, u64)> {
    let mut files = HashMap::new();
    let mut total = 0u64;

    for idx in 0..archive.len() {
        let mut entry = archive
            .by_index(idx)
            .with_context(|| format!("failed to read entry #{idx}"))?;

        // Directory entries carry no data; everything else must be a file.
        if entry.is_dir() {
            continue;
        }
        if !entry.is_file() {
            bail!("archive entry {} is not a regular file", entry.name());
        }

        // When unix mode bits are recorded, reject symlinks/devices/etc.
        // (0o100000 is the S_IFREG file-type bits).
        if let Some(mode) = entry.unix_mode() {
            let file_type = mode & 0o170000;
            if file_type != 0o100000 {
                bail!(
                    "unsupported file type for entry {}; only regular files are allowed",
                    entry.name()
                );
            }
        }

        // enclosed_name() already rejects paths escaping the archive root;
        // normalize_entry_path applies our stricter rules on top.
        let enclosed_path = entry
            .enclosed_name()
            .ok_or_else(|| anyhow!("archive entry contains unsafe path: {}", entry.name()))?
            .to_path_buf();
        let logical = normalize_entry_path(&enclosed_path)?;
        if files.contains_key(&logical) {
            bail!("duplicate entry detected: {}", logical);
        }

        // Size check uses the declared (decompressed) size before reading.
        let size = entry.size();
        if size > MAX_FILE_BYTES {
            let hint = if logical.ends_with(".gtpack") || logical.contains("/dist/") {
                " (this looks like a build artifact that was accidentally included — \
                 remove it from the pack source directory and rebuild)"
            } else {
                ""
            };
            bail!(
                "entry {} exceeds maximum allowed size of {}{}",
                logical,
                fmt_mb(MAX_FILE_BYTES),
                hint
            );
        }

        total = total
            .checked_add(size)
            .ok_or_else(|| anyhow!("archive size overflow"))?;

        let mut buf = Vec::with_capacity(size as usize);
        entry
            .read_to_end(&mut buf)
            .with_context(|| format!("failed to read {}", logical))?;
        files.insert(logical, buf);
    }

    Ok((files, total))
}
846
847fn normalize_entry_path(path: &Path) -> Result<String> {
848 if path.is_absolute() {
849 bail!("archive entry uses absolute path: {}", path.display());
850 }
851
852 if path.components().any(|comp| {
853 matches!(
854 comp,
855 std::path::Component::ParentDir | std::path::Component::RootDir
856 )
857 }) {
858 bail!(
859 "archive entry contains invalid path segments: {}",
860 path.display()
861 );
862 }
863
864 let mut normalized = Vec::new();
865 for comp in path.components() {
866 match comp {
867 std::path::Component::Normal(seg) => {
868 let segment = seg
869 .to_str()
870 .ok_or_else(|| anyhow!("entry contains non-utf8 segment"))?;
871 if segment.is_empty() {
872 bail!("entry contains empty path segment");
873 }
874 normalized.push(segment.replace('\\', "/"));
875 }
876 std::path::Component::CurDir => continue,
877 _ => bail!(
878 "archive entry contains unsupported segment: {}",
879 path.display()
880 ),
881 }
882 }
883
884 if normalized.is_empty() {
885 bail!("archive entry lacks a valid filename");
886 }
887
888 Ok(normalized.join("/"))
889}
890
/// Which of the two supported manifest layouts `manifest.cbor` decoded as.
/// Boxed because both payloads are large.
#[derive(Debug)]
enum ManifestModel {
    Pack(Box<PackManifest>),
    Gpack(Box<GpackManifest>),
}
896
897fn decode_manifest(bytes: &[u8]) -> Result<ManifestModel> {
898 if let Ok(manifest) = serde_cbor::from_slice::<PackManifest>(bytes) {
899 return Ok(ManifestModel::Pack(Box::new(manifest)));
900 }
901
902 let manifest = decode_pack_manifest(bytes)?;
903 Ok(ManifestModel::Gpack(Box::new(manifest)))
904}
905
906fn synthesize_sbom(files: &HashMap<String, Vec<u8>>) -> Vec<SbomEntry> {
907 let mut entries: Vec<_> = files
908 .iter()
909 .filter(|(path, _)| *path != SIGNATURE_PATH && *path != SIGNATURE_CHAIN_PATH)
910 .map(|(path, data)| SbomEntry {
911 path: path.clone(),
912 size: data.len() as u64,
913 hash_blake3: hex_hash(data),
914 media_type: media_type_for(path).to_string(),
915 })
916 .collect();
917 entries.sort_by(|a, b| a.path.cmp(&b.path));
918 entries
919}
920
/// Guess a media type from the path's file-extension suffix; unknown
/// extensions default to `application/octet-stream`.
fn media_type_for(path: &str) -> &'static str {
    let table: &[(&[&str], &str)] = &[
        (&[".cbor"], "application/cbor"),
        (&[".json"], "application/json"),
        (&[".wasm"], "application/wasm"),
        (&[".yaml", ".yml"], "application/yaml"),
    ];
    for (suffixes, media_type) in table {
        if suffixes.iter().any(|suffix| path.ends_with(suffix)) {
            return media_type;
        }
    }
    "application/octet-stream"
}
934
935fn sha256_prefixed(bytes: &[u8]) -> String {
936 let mut sha = Sha256::new();
937 sha.update(bytes);
938 format!("sha256:{:x}", sha.finalize())
939}
940
/// Adapt a gpack-format manifest into the crate-local `PackManifest` shape.
/// Fields the gpack format does not carry (description, license, timestamps,
/// …) are filled with neutral defaults.
fn convert_gpack_manifest(
    manifest: &GpackManifest,
    files: &HashMap<String, Vec<u8>>,
) -> PackManifest {
    let publisher = manifest.publisher.clone();
    let entry_flows = derive_entry_flows(manifest);
    let imports = manifest
        .dependencies
        .iter()
        .map(|dep| ImportRef {
            pack_id: dep.pack_id.to_string(),
            version_req: dep.version_req.to_string(),
        })
        .collect();
    let flows = manifest.flows.iter().map(convert_gpack_flow).collect();
    let components = manifest
        .components
        .iter()
        .map(|component| {
            // Component wasm is expected at the conventional path; the hash
            // is computed from the archive bytes (empty when absent).
            let file_wasm = format!("components/{}.wasm", component.id);
            ComponentEntry {
                name: component.id.to_string(),
                version: component.version.clone(),
                file_wasm: file_wasm.clone(),
                hash_blake3: component_hash(&file_wasm, files),
                schema_file: None,
                manifest_file: None,
                world: Some(component.world.clone()),
                capabilities: serde_json::to_value(&component.capabilities).ok(),
            }
        })
        .collect();

    PackManifest {
        meta: PackMeta {
            pack_version: crate::builder::PACK_VERSION,
            pack_id: manifest.pack_id.to_string(),
            version: manifest.version.clone(),
            // gpack has no display name; reuse the pack id.
            name: manifest.pack_id.to_string(),
            kind: None,
            description: None,
            authors: if publisher.is_empty() {
                Vec::new()
            } else {
                vec![publisher]
            },
            license: None,
            homepage: None,
            support: None,
            vendor: None,
            imports,
            entry_flows,
            // No creation time is recorded in the gpack manifest; use epoch.
            created_at_utc: "1970-01-01T00:00:00Z".into(),
            events: None,
            repo: None,
            messaging: None,
            interfaces: Vec::new(),
            annotations: Default::default(),
            distribution: None,
            components: Vec::new(),
        },
        flows,
        components,
        distribution: None,
        component_descriptors: Vec::new(),
    }
}
1008
1009fn convert_gpack_flow(entry: &greentic_types::pack_manifest::PackFlowEntry) -> FlowEntry {
1010 let flow_bytes = serde_json::to_vec(&entry.flow).unwrap_or_default();
1011 let entry_point = entry
1012 .entrypoints
1013 .first()
1014 .cloned()
1015 .or_else(|| entry.flow.entrypoints.keys().next().cloned())
1016 .unwrap_or_else(|| entry.id.to_string());
1017
1018 FlowEntry {
1019 id: entry.id.to_string(),
1020 kind: entry.flow.schema_version.clone(),
1021 entry: entry_point,
1022 file_yaml: format!("flows/{}/flow.ygtc", entry.id),
1023 file_json: format!("flows/{}/flow.json", entry.id),
1024 hash_blake3: hex_hash(&flow_bytes),
1025 }
1026}
1027
1028fn derive_entry_flows(manifest: &GpackManifest) -> Vec<String> {
1029 let mut entries = Vec::new();
1030 for flow in &manifest.flows {
1031 if flow.entrypoints.is_empty() && flow.flow.entrypoints.is_empty() {
1032 entries.push(flow.id.to_string());
1033 continue;
1034 }
1035 entries.extend(flow.entrypoints.iter().cloned());
1036 entries.extend(flow.flow.entrypoints.keys().cloned());
1037 }
1038 if entries.is_empty() {
1039 entries.push(manifest.pack_id.to_string());
1040 }
1041 entries.sort();
1042 entries.dedup();
1043 entries
1044}
1045
1046fn component_hash(path: &str, files: &HashMap<String, Vec<u8>>) -> String {
1047 files
1048 .get(path)
1049 .map(|bytes| hex_hash(bytes))
1050 .unwrap_or_default()
1051}
1052
1053#[cfg(test)]
1054mod tests {
1055 use super::{MAX_ARCHIVE_BYTES, MAX_FILE_BYTES, SigningPolicy, open_pack};
1056 use crate::builder::SIGNATURE_CHAIN_PATH;
1057 use crate::builder::{
1058 ComponentArtifact, FlowBundle, PackBuilder, PackMeta, Provenance, Signing,
1059 };
1060 use blake3;
1061 use semver::Version;
1062 use serde_json::{Map, json};
1063 use std::fs::{self, File};
1064 use std::io::{Read, Write};
1065 use std::path::{Path, PathBuf};
1066 use tempfile::{TempDir, tempdir};
1067 use zip::write::SimpleFileOptions;
1068 use zip::{CompressionMethod, ZipArchive, ZipWriter};
1069
    // A correctly dev-signed pack opens cleanly with no warnings.
    #[test]
    fn open_pack_succeeds_for_dev_signature() {
        let (_dir, path) = build_pack(true);
        let load = open_pack(&path, SigningPolicy::DevOk).expect("reader validates pack");
        assert_eq!(load.manifest.meta.pack_id, "ai.greentic.demo.reader");
        assert!(load.report.warnings.is_empty());
    }

    // DevOk policy downgrades a missing signature to a warning.
    #[test]
    fn open_pack_warns_missing_signature_dev_policy() {
        let (_dir, path) = build_pack(false);
        let load = open_pack(&path, SigningPolicy::DevOk).expect("dev policy tolerates");
        assert!(
            load.report
                .warnings
                .iter()
                .any(|w| w.contains("signature files missing")),
            "expected warning about missing signatures"
        );
    }

    // Strict policy must reject the dev self-signed certificate outright.
    #[test]
    fn strict_policy_rejects_dev_certificate() {
        let (_dir, path) = build_pack(true);
        let err = open_pack(&path, SigningPolicy::Strict).unwrap_err();
        assert!(err.message.contains("strict"));
    }

    // DevOk accepts a multi-cert chain but flags the unexpected length.
    #[test]
    fn dev_policy_warns_for_multi_certificate_chain() {
        let (_dir, original) = build_pack(true);
        let (_tmp, rewritten) = duplicate_chain(&original);
        let load = open_pack(&rewritten, SigningPolicy::DevOk).expect("dev policy accepts");
        assert!(load.report.warnings.iter().any(|msg| msg.contains("chain")));
    }

    // Entries escaping the archive root must be refused.
    #[test]
    fn path_traversal_entry_is_rejected() {
        let (_dir, path) = custom_zip(&[zip_entry("../evil", b"oops")]);
        let err = open_pack(&path, SigningPolicy::DevOk).unwrap_err();
        assert!(err.message.contains("unsafe path") || err.message.contains("invalid path"));
    }

    // Rewrites the entry's unix mode to a symlink (S_IFLNK) to exercise the
    // file-type check in read_archive_entries.
    #[test]
    fn symlink_entry_is_rejected() {
        let (dir, path) = custom_zip(&[zip_entry("foo", b"bar")]);
        patch_external_attributes(&path, 0o120777 << 16);
        let err = open_pack(&path, SigningPolicy::DevOk).unwrap_err();
        assert!(
            err.message.contains("unsupported file type")
                || err.message.contains("not a regular file")
        );
        drop(dir);
    }

    // Per-entry size limit (small under cfg(test)) must be enforced.
    #[test]
    fn oversized_entry_is_rejected() {
        let huge = vec![0u8; (MAX_FILE_BYTES + 1) as usize];
        let (_dir, path) = custom_zip(&[zip_entry("huge.bin", &huge)]);
        let err = open_pack(&path, SigningPolicy::DevOk).unwrap_err();
        assert!(err.message.contains("exceeds maximum"));
    }

    // Total-archive limit: many under-limit entries that sum past the cap.
    #[test]
    fn oversized_archive_is_rejected() {
        let chunk = vec![0u8; (MAX_FILE_BYTES / 2) as usize];
        let needed = (MAX_ARCHIVE_BYTES / chunk.len() as u64) + 1;
        let mut entries = Vec::new();
        for idx in 0..needed {
            let name = format!("chunk{idx}");
            entries.push((name, chunk.clone()));
        }
        let (_dir, path) = custom_zip(&entries);
        let err = open_pack(&path, SigningPolicy::DevOk).unwrap_err();
        assert!(err.message.contains("archive exceeds"));
    }
1146
1147 fn temp_wasm(dir: &Path) -> PathBuf {
1148 let path = dir.join("component.wasm");
1149 std::fs::write(&path, [0x00u8, 0x61, 0x73, 0x6d, 0x01, 0x00, 0x00, 0x00]).unwrap();
1150 path
1151 }
1152
1153 fn sample_meta() -> PackMeta {
1154 PackMeta {
1155 pack_version: crate::builder::PACK_VERSION,
1156 pack_id: "ai.greentic.demo.reader".into(),
1157 version: Version::parse("0.1.0").unwrap(),
1158 name: "Reader Demo".into(),
1159 kind: None,
1160 description: None,
1161 authors: vec!["Greentic".into()],
1162 license: None,
1163 homepage: None,
1164 support: None,
1165 vendor: None,
1166 imports: vec![],
1167 entry_flows: vec!["demo".into()],
1168 created_at_utc: "2025-01-01T00:00:00Z".into(),
1169 events: None,
1170 repo: None,
1171 messaging: None,
1172 interfaces: Vec::new(),
1173 annotations: Map::new(),
1174 distribution: None,
1175 components: Vec::new(),
1176 }
1177 }
1178
1179 fn sample_flow() -> FlowBundle {
1180 let json = json!({
1181 "id": "demo",
1182 "kind": "flow/v1",
1183 "entry": "start",
1184 "nodes": []
1185 });
1186 FlowBundle {
1187 id: "demo".into(),
1188 kind: "flow/v1".into(),
1189 entry: "start".into(),
1190 yaml: "id: demo\nentry: start\n".into(),
1191 json: json.clone(),
1192 hash_blake3: blake3::hash(&serde_json::to_vec(&json).unwrap())
1193 .to_hex()
1194 .to_string(),
1195 nodes: Vec::new(),
1196 }
1197 }
1198
1199 fn sample_provenance() -> Provenance {
1200 Provenance {
1201 builder: "greentic-pack@test".into(),
1202 git_commit: Some("abc123".into()),
1203 git_repo: None,
1204 toolchain: None,
1205 built_at_utc: "2025-01-01T00:00:00Z".into(),
1206 host: None,
1207 notes: None,
1208 }
1209 }
1210
1211 fn build_pack(include_signature: bool) -> (TempDir, PathBuf) {
1212 let dir = tempdir().unwrap();
1213 let wasm = temp_wasm(dir.path());
1214 let out = dir.path().join("demo.gtpack");
1215 let mut builder = PackBuilder::new(sample_meta())
1216 .with_flow(sample_flow())
1217 .with_component(ComponentArtifact {
1218 name: "demo".into(),
1219 version: Version::parse("1.0.0").unwrap(),
1220 wasm_path: wasm,
1221 schema_json: None,
1222 manifest_json: None,
1223 capabilities: None,
1224 world: None,
1225 hash_blake3: None,
1226 })
1227 .with_provenance(sample_provenance());
1228 if !include_signature {
1229 builder = builder.with_signing(Signing::None);
1230 }
1231 builder.build(&out).unwrap();
1232 (dir, out)
1233 }
1234
1235 fn custom_zip(entries: &[(String, Vec<u8>)]) -> (TempDir, PathBuf) {
1236 use zip::DateTime;
1237
1238 let dir = tempdir().unwrap();
1239 let path = dir.path().join("custom.gtpack");
1240 let file = File::create(&path).unwrap();
1241 let mut writer = ZipWriter::new(file);
1242 let timestamp = DateTime::from_date_and_time(1980, 1, 1, 0, 0, 0).unwrap();
1243 for (name, data) in entries.iter() {
1244 let options = SimpleFileOptions::default()
1245 .compression_method(CompressionMethod::Stored)
1246 .last_modified_time(timestamp)
1247 .unix_permissions(0o644);
1248 writer.start_file(name, options).unwrap();
1249 writer.write_all(data).unwrap();
1250 }
1251 writer.finish().unwrap();
1252 (dir, path)
1253 }
1254
1255 fn zip_entry(name: &str, data: &[u8]) -> (String, Vec<u8>) {
1256 (name.to_string(), data.to_vec())
1257 }
1258
1259 fn patch_external_attributes(path: &Path, attr: u32) {
1260 let mut bytes = fs::read(path).unwrap();
1261 let signature = [0x50, 0x4b, 0x01, 0x02];
1262 let pos = bytes
1263 .windows(4)
1264 .rposition(|window| window == signature)
1265 .expect("central directory missing");
1266 let attr_pos = pos + 38;
1267 bytes[attr_pos..attr_pos + 4].copy_from_slice(&attr.to_le_bytes());
1268 fs::write(path, bytes).unwrap();
1269 }
1270
1271 fn duplicate_chain(original: &Path) -> (TempDir, PathBuf) {
1272 use zip::DateTime;
1273
1274 let mut archive = ZipArchive::new(File::open(original).unwrap()).unwrap();
1275 let dir = tempdir().unwrap();
1276 let new_path = dir.path().join("rewritten.gtpack");
1277 let file = File::create(&new_path).unwrap();
1278 let mut writer = ZipWriter::new(file);
1279 let timestamp = DateTime::from_date_and_time(1980, 1, 1, 0, 0, 0).unwrap();
1280
1281 for i in 0..archive.len() {
1282 let mut entry = archive.by_index(i).unwrap();
1283 let mut data = Vec::new();
1284 entry.read_to_end(&mut data).unwrap();
1285 if entry.name() == SIGNATURE_CHAIN_PATH {
1286 let original = data.clone();
1287 data.push(b'\n');
1288 data.extend_from_slice(&original);
1289 }
1290 let options = SimpleFileOptions::default()
1291 .compression_method(CompressionMethod::Stored)
1292 .last_modified_time(timestamp)
1293 .unix_permissions(0o644);
1294 writer.start_file(entry.name(), options).unwrap();
1295 writer.write_all(&data).unwrap();
1296 }
1297
1298 writer.finish().unwrap();
1299 (dir, new_path)
1300 }
1301}