use {
    crate::{
        binary_package_control::BinaryPackageControlFile,
        control::{ControlField, ControlParagraph},
        deb::reader::resolve_control_file,
        error::{DebianError, Result},
        io::{read_compressed, ContentDigest, DataResolver, MultiContentDigest, MultiDigester},
        repository::{
            release::{ChecksumType, ReleaseFile, DATE_FORMAT},
            Compression, PublishEvent, RepositoryPathVerificationState, RepositoryWriter,
        },
    },
    chrono::{DateTime, Utc},
    futures::{AsyncRead, AsyncReadExt, StreamExt, TryStreamExt},
    pgp::{crypto::hash::HashAlgorithm, types::SecretKeyTrait},
    pgp_cleartext::cleartext_sign,
    std::{
        borrow::Cow,
        collections::{BTreeMap, BTreeSet, HashMap},
        pin::Pin,
    },
};

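/// Convenience constant expressing the absence of a progress callback when publishing.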
pub const NO_PROGRESS_CB: Option<fn(PublishEvent)> = None;

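/// Convenience constant expressing the absence of a signing key and password callback.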
#[allow(clippy::type_complexity)]
pub const NO_SIGNING_KEY: Option<(&pgp::SignedSecretKey, fn() -> String)> = None;

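/// Describes how `.deb` files are laid out under the `pool/` directory of a repository.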
#[derive(Clone, Copy, Debug)]
pub enum PoolLayout {
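    /// `pool/<component>/<prefix>/<package>/<filename>`, where the prefix is the first
    /// letter of the package name, or the first four letters for `lib*` packages.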
    ComponentThenNamePrefix,
}

impl Default for PoolLayout {
    fn default() -> Self {
        Self::ComponentThenNamePrefix
    }
}

impl PoolLayout {
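    /// Compute the pool path for a file belonging to `package` within `component`.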
    pub fn path(&self, component: &str, package: &str, filename: &str) -> String {
        match self {
            Self::ComponentThenNamePrefix => {
                let name_prefix = if package.starts_with("lib") {
                    format!("{}/{}", &package[0..4], package)
                } else {
                    format!("{}/{}", &package[0..1], package)
                };

                format!("pool/{}/{}/{}", component, name_prefix, filename)
            }
        }
    }
}

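/// Describes a binary `.deb` package in enough detail to derive its entry in a
/// `Packages` index file and its placement in the pool.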
pub trait DebPackageReference<'cf> {
    fn deb_size_bytes(&self) -> Result<u64>;

    fn deb_digest(&self, checksum: ChecksumType) -> Result<ContentDigest>;

    fn deb_filename(&self) -> Result<String>;

    fn control_file_for_packages_index(&self) -> Result<BinaryPackageControlFile<'cf>>;
}

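/// A `.deb` package backed by a byte buffer held in memory.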
pub struct InMemoryDebFile {
    filename: String,
    data: Vec<u8>,
}

impl InMemoryDebFile {
    pub fn new(filename: String, data: Vec<u8>) -> Self {
        Self { filename, data }
    }
}

impl<'cf> DebPackageReference<'cf> for InMemoryDebFile {
    fn deb_size_bytes(&self) -> Result<u64> {
        Ok(self.data.len() as u64)
    }

    fn deb_digest(&self, checksum: ChecksumType) -> Result<ContentDigest> {
        let mut h = checksum.new_hasher();
        h.update(&self.data);
        let digest = h.finish().to_vec();

        Ok(match checksum {
            ChecksumType::Md5 => ContentDigest::Md5(digest),
            ChecksumType::Sha1 => ContentDigest::Sha1(digest),
            ChecksumType::Sha256 => ContentDigest::Sha256(digest),
        })
    }

    fn deb_filename(&self) -> Result<String> {
        Ok(self.filename.clone())
    }

    fn control_file_for_packages_index(&self) -> Result<BinaryPackageControlFile<'cf>> {
        resolve_control_file(std::io::Cursor::new(&self.data))
    }
}

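/// A readable index file (e.g. a `Packages` file) plus the metadata describing where it
/// lives in the repository and how it is compressed.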
pub struct IndexFileReader<'a> {
    pub reader: Pin<Box<dyn AsyncRead + Send + 'a>>,
    pub compression: Compression,
    pub directory: String,
    pub filename: String,
}

impl<'a> IndexFileReader<'a> {
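    /// The path of this index file relative to the `Release` file, including the
    /// compression-specific file extension.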
    pub fn canonical_path(&self) -> String {
        format!(
            "{}/{}{}",
            self.directory,
            self.filename,
            self.compression.extension()
        )
    }

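    /// The `by-hash` path for this index file, given one of its content digests.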
    pub fn by_hash_path(&self, digest: &ContentDigest) -> String {
        format!(
            "{}/by-hash/{}/{}",
            self.directory,
            digest.release_field_name(),
            digest.digest_hex()
        )
    }
}

struct ExpandedIndexFile {
    canonical_path: String,
    write_path: String,
    digests: MultiContentDigest,
    data: Vec<u8>,
}

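/// Describes a `.deb` file in the pool backing an entry in a `Packages` index.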
#[derive(Debug)]
pub struct BinaryPackagePoolArtifact<'a> {
    pub path: &'a str,
    pub size: u64,
    pub digest: ContentDigest,
}

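/// Binary packages keyed by (package, version), each value being the `Packages`
/// index paragraph for that package.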
type IndexedBinaryPackages<'a> = BTreeMap<(String, String), ControlParagraph<'a>>;

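/// Indexed binary packages grouped by (component, architecture).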
type ComponentBinaryPackages<'a> = BTreeMap<(String, String), IndexedBinaryPackages<'a>>;

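/// Builds Debian repositories from scratch: packages are registered against the builder,
/// which can then publish pool artifacts, `Packages` indices, and a `Release`/`InRelease`
/// pair to a [RepositoryWriter].
///
/// A rough usage sketch (error handling and the concrete `deb`, `writer`, and `resolver`
/// values elided; see the tests at the bottom of this module for a complete example):
///
/// ```ignore
/// let mut builder = RepositoryBuilder::new_recommended(
///     ["amd64"].iter(),
///     ["main"].iter(),
///     "suite",
///     "codename",
/// );
/// builder.add_binary_deb("main", &deb)?;
/// builder
///     .publish(&writer, &resolver, "dists/mydist", 8, &NO_PROGRESS_CB, NO_SIGNING_KEY)
///     .await?;
/// ```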
#[derive(Debug, Default)]
pub struct RepositoryBuilder<'cf> {
    architectures: BTreeSet<String>,
    components: BTreeSet<String>,
    suite: Option<String>,
    codename: Option<String>,
    date: Option<DateTime<Utc>>,
    valid_until: Option<DateTime<Utc>>,
    description: Option<String>,
    origin: Option<String>,
    label: Option<String>,
    version: Option<String>,
    acquire_by_hash: Option<bool>,
    checksums: BTreeSet<ChecksumType>,
    pool_layout: PoolLayout,
    index_file_compressions: BTreeSet<Compression>,
    binary_packages: ComponentBinaryPackages<'cf>,
    installer_packages: ComponentBinaryPackages<'cf>,
    source_packages: BTreeMap<String, IndexedBinaryPackages<'cf>>,
    translations: BTreeMap<String, ()>,
}

impl<'cf> RepositoryBuilder<'cf> {
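    /// Create a builder with recommended defaults (current date, `Acquire-By-Hash: yes`,
    /// MD5 + SHA256 checksums, and none/gzip/xz index compression) but no architectures,
    /// components, suite, or codename.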
    pub fn new_recommended_empty() -> Self {
        Self {
            architectures: BTreeSet::new(),
            components: BTreeSet::new(),
            suite: None,
            codename: None,
            date: Some(Utc::now()),
            valid_until: None,
            description: None,
            origin: None,
            label: None,
            version: None,
            acquire_by_hash: Some(true),
            checksums: BTreeSet::from_iter([ChecksumType::Md5, ChecksumType::Sha256]),
            pool_layout: PoolLayout::default(),
            index_file_compressions: BTreeSet::from_iter([
                Compression::None,
                Compression::Gzip,
                Compression::Xz,
            ]),
            binary_packages: ComponentBinaryPackages::default(),
            installer_packages: ComponentBinaryPackages::default(),
            source_packages: BTreeMap::default(),
            translations: BTreeMap::default(),
        }
    }

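    /// Like [Self::new_recommended_empty()], but with the architectures, components,
    /// suite, and codename populated.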
    pub fn new_recommended(
        architectures: impl Iterator<Item = impl ToString>,
        components: impl Iterator<Item = impl ToString>,
        suite: impl ToString,
        codename: impl ToString,
    ) -> Self {
        Self {
            architectures: BTreeSet::from_iter(architectures.map(|x| x.to_string())),
            components: BTreeSet::from_iter(components.map(|x| x.to_string())),
            suite: Some(suite.to_string()),
            codename: Some(codename.to_string()),
            ..Self::new_recommended_empty()
        }
    }

    pub fn add_architecture(&mut self, arch: impl ToString) {
        self.architectures.insert(arch.to_string());
    }

    pub fn add_component(&mut self, name: impl ToString) {
        self.components.insert(name.to_string());
    }

    pub fn add_checksum(&mut self, value: ChecksumType) {
        self.checksums.insert(value);
    }

    pub fn set_suite(&mut self, value: impl ToString) {
        self.suite = Some(value.to_string());
    }

    pub fn set_codename(&mut self, value: impl ToString) {
        self.codename = Some(value.to_string());
    }

    pub fn set_date(&mut self, value: DateTime<Utc>) {
        self.date = Some(value);
    }

    pub fn set_valid_until(&mut self, value: DateTime<Utc>) {
        self.valid_until = Some(value);
    }

    pub fn set_description(&mut self, value: impl ToString) {
        self.description = Some(value.to_string());
    }

    pub fn set_origin(&mut self, value: impl ToString) {
        self.origin = Some(value.to_string());
    }

    pub fn set_label(&mut self, value: impl ToString) {
        self.label = Some(value.to_string());
    }

    pub fn set_version(&mut self, value: impl ToString) {
        self.version = Some(value.to_string());
    }

    pub fn set_acquire_by_hash(&mut self, value: bool) {
        self.acquire_by_hash = Some(value);
    }

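    /// Change the pool layout. This is only allowed before any packages have been added,
    /// because pool paths are computed when a package is registered.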
    pub fn set_pool_layout(&mut self, layout: PoolLayout) -> Result<()> {
        if self.have_entries() {
            Err(DebianError::RepositoryBuildPoolLayoutImmutable)
        } else {
            self.pool_layout = layout;
            Ok(())
        }
    }

    fn have_entries(&self) -> bool {
        !self.binary_packages.is_empty()
            || !self.source_packages.is_empty()
            || !self.installer_packages.is_empty()
            || !self.translations.is_empty()
    }

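    /// Register a binary `.deb` with the builder, indexing it under `component`.
    ///
    /// The package's control file is turned into a `Packages` index paragraph: the
    /// `Filename`, `Size`, configured checksum fields, and `Description-md5` are
    /// (re)computed here. Returns the pool path the `.deb` must be published to.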
    pub fn add_binary_deb(
        &mut self,
        component: &str,
        deb: &impl DebPackageReference<'cf>,
    ) -> Result<String> {
        if !self.components.contains(component) {
            return Err(DebianError::RepositoryBuildUnknownComponent(
                component.to_string(),
            ));
        }

        let original_control_file = deb.control_file_for_packages_index()?;

        let package = original_control_file.package()?;
        let version = original_control_file.version_str()?;
        let arch = original_control_file.architecture()?;

        if !self.architectures.contains(arch) {
            return Err(DebianError::RepositoryBuildUnknownArchitecture(
                arch.to_string(),
            ));
        }

        let mut para = ControlParagraph::default();

        for field in original_control_file.iter_fields() {
            if ![
                "Description",
                "Filename",
                "Size",
                "MD5sum",
                "SHA1",
                "SHA256",
            ]
            .contains(&field.name())
            {
                para.set_field(field.clone());
            }
        }

        if let Some(description) = original_control_file.field("Description") {
            let description = description.value_str();

            if let Some(index) = description.find('\n') {
                let mut h = ChecksumType::Md5.new_hasher();
                h.update(description.as_bytes());
                h.update(b"\n");
                let digest = h.finish();

                para.set_field_from_string(
                    "Description".into(),
                    (description[0..index]).to_string().into(),
                );
                para.set_field_from_string("Description-md5".into(), hex::encode(digest).into());
            } else {
                para.set_field_from_string("Description".into(), description.to_string().into());
            }
        }

        let filename = self.pool_layout.path(
            component,
            if let Some(name) = original_control_file.source() {
                name
            } else {
                package
            },
            &deb.deb_filename()?,
        );
        para.set_field_from_string("Filename".into(), filename.clone().into());

        para.set_field_from_string("Size".into(), format!("{}", deb.deb_size_bytes()?).into());

        for checksum in &self.checksums {
            let digest = deb.deb_digest(*checksum)?;

            para.set_field_from_string(checksum.field_name().into(), digest.digest_hex().into());
        }

        let component_key = (component.to_string(), arch.to_string());
        let package_key = (package.to_string(), version.to_string());
        self.binary_packages
            .entry(component_key)
            .or_default()
            .insert(package_key, para);

        Ok(filename)
    }

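    /// Iterate over the distinct (component, architecture) pairs having binary packages.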
    pub fn binary_package_components(&self) -> impl Iterator<Item = (&str, &str)> + '_ {
        self.binary_packages
            .keys()
            .map(|(a, b)| (a.as_str(), b.as_str()))
    }

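    /// Iterate over the `Packages` index paragraphs for the given component and architecture.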
    pub fn iter_component_binary_packages(
        &self,
        component: impl ToString,
        architecture: impl ToString,
    ) -> Box<dyn Iterator<Item = &'_ ControlParagraph> + Send + '_> {
        if let Some(packages) = self
            .binary_packages
            .get(&(component.to_string(), architecture.to_string()))
        {
            Box::new(packages.values())
        } else {
            Box::new(std::iter::empty())
        }
    }

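    /// Iterate over the pool artifacts (path, size, strongest digest) backing packages
    /// in the given component and architecture.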
    pub fn iter_component_binary_package_pool_artifacts(
        &self,
        component: impl ToString,
        architecture: impl ToString,
    ) -> impl Iterator<Item = Result<BinaryPackagePoolArtifact<'_>>> + '_ {
        self.iter_component_binary_packages(component, architecture)
            .map(|para| {
                let path = para
                    .field_str("Filename")
                    .expect("Filename should have been populated at package add time");
                let size = para
                    .field_u64("Size")
                    .expect("Size should have been populated at package add time")
                    .expect("Size should parse to an integer");

                let strongest_checksum = self
                    .checksums
                    .iter()
                    .last()
                    .expect("should have at least 1 checksum defined");

                let digest_hex = para
                    .field_str(strongest_checksum.field_name())
                    .expect("checksum's field should have been set");
                let digest = ContentDigest::from_hex_digest(*strongest_checksum, digest_hex)?;

                Ok(BinaryPackagePoolArtifact { path, size, digest })
            })
    }

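    /// Obtain an [AsyncRead] that emits an uncompressed `Packages` index file for the
    /// given component and architecture.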
    pub fn component_binary_packages_reader(
        &self,
        component: impl ToString,
        architecture: impl ToString,
    ) -> impl AsyncRead + '_ {
        futures::stream::iter(
            self.iter_component_binary_packages(component, architecture)
                .map(|p| Ok(format!("{}\n", p.to_string()))),
        )
        .into_async_read()
    }

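    /// Like [Self::component_binary_packages_reader()], but compressing the stream with
    /// the given compression format.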
    pub fn component_binary_packages_reader_compression(
        &self,
        component: impl ToString,
        architecture: impl ToString,
        compression: Compression,
    ) -> Pin<Box<dyn AsyncRead + Send + '_>> {
        read_compressed(
            futures::io::BufReader::new(
                self.component_binary_packages_reader(
                    component.to_string(),
                    architecture.to_string(),
                ),
            ),
            compression,
        )
    }

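    /// Obtain readers for every `Packages` index file: one per (component, architecture,
    /// compression) combination.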
    pub fn binary_packages_index_readers(&self) -> impl Iterator<Item = IndexFileReader<'_>> + '_ {
        self.binary_packages
            .keys()
            .flat_map(move |(component, architecture)| {
                self.index_file_compressions
                    .iter()
                    .map(move |compression| IndexFileReader {
                        reader: self.component_binary_packages_reader_compression(
                            component,
                            architecture,
                            *compression,
                        ),
                        compression: *compression,
                        directory: format!("{}/binary-{}", component, architecture),
                        filename: "Packages".to_string(),
                    })
            })
    }

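    /// Obtain readers for all index files to be written. Currently this is only the
    /// binary `Packages` indices.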
    pub fn index_file_readers(&self) -> impl Iterator<Item = IndexFileReader<'_>> + '_ {
        self.binary_packages_index_readers()
    }

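    /// Iterate over all pool artifacts backing the registered binary packages.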
    pub fn iter_binary_packages_pool_artifacts(
        &self,
    ) -> impl Iterator<Item = Result<BinaryPackagePoolArtifact<'_>>> + '_ {
        self.binary_packages
            .keys()
            .flat_map(move |(component, architecture)| {
                self.iter_component_binary_package_pool_artifacts(component, architecture)
            })
    }

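    /// Publish all pool artifacts to `writer`, obtaining their content from `resolver`.
    ///
    /// Paths are first verified against the writer; only missing or integrity-mismatched
    /// artifacts are copied, with up to `threads` operations in flight at once.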
    pub async fn publish_pool_artifacts<F>(
        &self,
        resolver: &impl DataResolver,
        writer: &impl RepositoryWriter,
        threads: usize,
        progress_cb: &Option<F>,
    ) -> Result<()>
    where
        F: Fn(PublishEvent),
    {
        let artifacts = self
            .iter_binary_packages_pool_artifacts()
            .collect::<Result<Vec<_>>>()?;

        if let Some(ref cb) = progress_cb {
            cb(PublishEvent::ResolvedPoolArtifacts(artifacts.len()));
        }

        let mut fs = futures::stream::iter(
            artifacts
                .iter()
                .map(|a| writer.verify_path(a.path, Some((a.size, a.digest.clone())))),
        )
        .buffer_unordered(threads);

        let mut missing_paths = BTreeSet::new();

        while let Some(result) = fs.next().await {
            let result = result?;

            match result.state {
                RepositoryPathVerificationState::ExistsNoIntegrityCheck
                | RepositoryPathVerificationState::ExistsIntegrityVerified => {
                    if let Some(ref cb) = progress_cb {
                        cb(PublishEvent::PoolArtifactCurrent(result.path.to_string()));
                    }
                }
                RepositoryPathVerificationState::ExistsIntegrityMismatch
                | RepositoryPathVerificationState::Missing => {
                    if let Some(ref cb) = progress_cb {
                        cb(PublishEvent::PoolArtifactMissing(result.path.to_string()));
                    }

                    missing_paths.insert(result.path);
                }
            }
        }

        if let Some(ref cb) = progress_cb {
            cb(PublishEvent::PoolArtifactsToPublish(missing_paths.len()));
        }

        let mut fs = futures::stream::iter(
            artifacts
                .iter()
                .filter(|a| missing_paths.contains(a.path))
                .map(|a| get_path_and_copy(resolver, writer, a)),
        )
        .buffer_unordered(threads);

        while let Some(artifact) = fs.next().await {
            let artifact = artifact?;

            if let Some(ref cb) = progress_cb {
                cb(PublishEvent::PoolArtifactCreated(
                    artifact.path.to_string(),
                    artifact.size,
                ));
            }
        }

        Ok(())
    }

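    /// Read an index file to completion and expand it into the concrete files to write:
    /// either the single canonical path, or one `by-hash` path per configured checksum
    /// when `Acquire-By-Hash` is enabled.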
    async fn expand_index_file_reader<'ifr, 'slf: 'ifr>(
        &'slf self,
        mut ifr: IndexFileReader<'ifr>,
    ) -> Result<Box<dyn Iterator<Item = ExpandedIndexFile> + 'ifr>> {
        let mut buf = vec![];
        ifr.reader.read_to_end(&mut buf).await?;

        let mut digester = MultiDigester::default();
        digester.update(&buf);
        let digests = digester.finish();

        if self.acquire_by_hash == Some(true) {
            Ok(Box::new(self.checksums.iter().map(move |checksum| {
                ExpandedIndexFile {
                    canonical_path: ifr.canonical_path(),
                    write_path: ifr.by_hash_path(digests.digest_from_checksum(*checksum)),
                    digests: digests.clone(),
                    data: buf.clone(),
                }
            })))
        } else {
            Ok(Box::new(std::iter::once(ExpandedIndexFile {
                canonical_path: ifr.canonical_path(),
                write_path: ifr.canonical_path(),
                digests,
                data: buf,
            })))
        }
    }

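    /// Derive the non-checksum fields of the `Release` file from the builder's settings.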
    fn static_release_fields(&self) -> impl Iterator<Item = ControlField<'_>> {
        let mut fields: BTreeMap<Cow<'_, str>, Cow<'_, str>> = BTreeMap::new();

        fields.insert(
            "Components".into(),
            self.components
                .iter()
                .map(|x| x.as_str())
                .collect::<Vec<_>>()
                .join(" ")
                .into(),
        );

        fields.insert(
            "Architectures".into(),
            self.architectures
                .iter()
                .map(|x| x.as_str())
                .collect::<Vec<_>>()
                .join(" ")
                .into(),
        );

        if let Some(suite) = &self.suite {
            fields.insert("Suite".into(), suite.into());
        }
        if let Some(codename) = &self.codename {
            fields.insert("Codename".into(), codename.into());
        }
        if let Some(date) = &self.date {
            fields.insert(
                "Date".into(),
                format!("{}", date.format(DATE_FORMAT)).into(),
            );
        }
        if let Some(valid_until) = &self.valid_until {
            fields.insert(
                "Valid-Until".into(),
                format!("{}", valid_until.format(DATE_FORMAT)).into(),
            );
        }
        if let Some(description) = &self.description {
            fields.insert("Description".into(), description.into());
        }
        if let Some(origin) = &self.origin {
            fields.insert("Origin".into(), origin.into());
        }
        if let Some(label) = &self.label {
            fields.insert("Label".into(), label.into());
        }
        if let Some(version) = &self.version {
            fields.insert("Version".into(), version.into());
        }
        if let Some(acquire_by_hash) = self.acquire_by_hash {
            fields.insert(
                "Acquire-By-Hash".into(),
                if acquire_by_hash { "yes" } else { "no" }.into(),
            );
        }

        fields.into_iter().map(|(k, v)| ControlField::new(k, v))
    }

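    /// Construct a `Release` file from the builder's metadata plus per-index
    /// (path, (size, digests)) entries, emitting one checksum block per configured
    /// checksum type.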
    pub fn create_release_file(
        &self,
        indices: impl Iterator<Item = (String, (u64, MultiContentDigest))>,
    ) -> Result<ReleaseFile<'_>> {
        let mut para = ControlParagraph::default();

        for field in self.static_release_fields() {
            para.set_field(field);
        }

        let mut digests_by_field = HashMap::new();

        for (path, (size, digests)) in indices {
            for digest in digests.iter_digests() {
                digests_by_field
                    .entry(digest.release_field_name())
                    .or_insert_with(BTreeMap::new)
                    .insert(path.clone(), (size, digest.digest_hex()));
            }
        }

        for checksum in self.checksums.iter() {
            let default = BTreeMap::new();
            let entries = digests_by_field
                .get(checksum.field_name())
                .unwrap_or(&default);

            let longest_path = entries.keys().map(|x| x.len()).max().unwrap_or_default();
            let longest_size = entries
                .values()
                .map(|(size, _)| format!("{}", size).len())
                .max()
                .unwrap_or_default();

            para.set_field(ControlField::new(
                checksum.field_name().into(),
                std::iter::once("".to_string())
                    .chain(entries.iter().map(|(path, (size, digest))| {
                        format!(
                            " {:<path_width$} {:>size_width$} {}",
                            path,
                            size,
                            digest,
                            path_width = longest_path,
                            size_width = longest_size
                        )
                    }))
                    .collect::<Vec<_>>()
                    .join("\n")
                    .into(),
            ));
        }

        Ok(para.into())
    }

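    /// Write all index files plus `Release` (and, when a signing key is given, a
    /// cleartext-signed `InRelease`) beneath `path_prefix` on `writer`.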
    pub async fn publish_indices<F, PW>(
        &self,
        writer: &impl RepositoryWriter,
        path_prefix: Option<&str>,
        threads: usize,
        progress_cb: &Option<F>,
        signing_key: Option<(&impl SecretKeyTrait, PW)>,
    ) -> Result<()>
    where
        F: Fn(PublishEvent),
        PW: FnOnce() -> String,
    {
        let mut index_paths = BTreeMap::new();

        let mut fs = futures::stream::iter(
            self.index_file_readers()
                .map(|ifr| self.expand_index_file_reader(ifr)),
        )
        .buffer_unordered(threads);

        let mut iters = vec![];

        while let Some(res) = fs.try_next().await? {
            for mut eif in res {
                if let Some(prefix) = path_prefix {
                    eif.write_path = format!("{}/{}", prefix.trim_matches('/'), eif.write_path);
                }

                if let Some(cb) = progress_cb {
                    cb(PublishEvent::IndexFileToWrite(eif.write_path.clone()));
                }

                index_paths.insert(
                    eif.canonical_path.clone(),
                    (eif.data.len() as u64, eif.digests.clone()),
                );

                iters.push(eif);
            }
        }

        let mut fs = futures::stream::iter(iters.into_iter().map(|eif| {
            writer.write_path(
                eif.write_path.into(),
                Box::pin(futures::io::Cursor::new(eif.data)),
            )
        }))
        .buffer_unordered(threads);

        while let Some(write) = fs.try_next().await? {
            if let Some(cb) = progress_cb {
                cb(PublishEvent::IndexFileWritten(
                    write.path.to_string(),
                    write.bytes_written,
                ));
            }
        }

        let release = self.create_release_file(index_paths.into_iter())?;

        let (release_path, inrelease_path) = if let Some(prefix) = path_prefix {
            (
                format!("{}/Release", prefix.trim_matches('/')),
                format!("{}/InRelease", prefix.trim_matches('/')),
            )
        } else {
            ("Release".to_string(), "InRelease".to_string())
        };

        if let Some(cb) = progress_cb {
            cb(PublishEvent::IndexFileToWrite(release_path.clone()))
        }

        let release_write = writer
            .write_path(
                release_path.into(),
                Box::pin(futures::io::Cursor::new(release.to_string().into_bytes())),
            )
            .await?;

        if let Some(cb) = progress_cb {
            cb(PublishEvent::IndexFileWritten(
                release_write.path.to_string(),
                release_write.bytes_written,
            ));
        }

        if let Some((key, password)) = signing_key {
            let inrelease_content = cleartext_sign(
                key,
                password,
                HashAlgorithm::SHA2_256,
                std::io::Cursor::new(release.to_string().as_bytes()),
            )?;

            if let Some(cb) = progress_cb {
                cb(PublishEvent::IndexFileToWrite(inrelease_path.clone()));
            }

            let inrelease_write = writer
                .write_path(
                    inrelease_path.into(),
                    Box::pin(futures::io::Cursor::new(inrelease_content.into_bytes())),
                )
                .await?;

            if let Some(cb) = progress_cb {
                cb(PublishEvent::IndexFileWritten(
                    inrelease_write.path.to_string(),
                    inrelease_write.bytes_written,
                ));
            }
        }

        Ok(())
    }

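    /// Publish the entire repository: pool artifacts first, then indices and the
    /// `Release`/`InRelease` files under `distribution_path` (e.g. `dists/bullseye`).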
    pub async fn publish<F, PW>(
        &self,
        writer: &impl RepositoryWriter,
        resolver: &impl DataResolver,
        distribution_path: &str,
        threads: usize,
        progress_cb: &Option<F>,
        signing_key: Option<(&impl SecretKeyTrait, PW)>,
    ) -> Result<()>
    where
        F: Fn(PublishEvent),
        PW: FnOnce() -> String,
    {
        self.publish_pool_artifacts(resolver, writer, threads, progress_cb)
            .await?;

        self.publish_indices(
            writer,
            Some(distribution_path),
            threads,
            progress_cb,
            signing_key,
        )
        .await?;

        Ok(())
    }
}

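/// Fetch a pool artifact from `resolver`, verifying its size and digest, and copy it to `writer`.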
async fn get_path_and_copy<'a, 'b>(
    resolver: &impl DataResolver,
    writer: &impl RepositoryWriter,
    artifact: &'a BinaryPackagePoolArtifact<'b>,
) -> Result<&'a BinaryPackagePoolArtifact<'b>> {
    let reader = resolver
        .get_path_with_digest_verification(artifact.path, artifact.size, artifact.digest.clone())
        .await?;

    writer.write_path(artifact.path.into(), reader).await?;

    Ok(artifact)
}

#[cfg(test)]
mod test {
    #[cfg(feature = "http")]
    use crate::repository::http::HttpRepositoryClient;
    use {
        super::*,
        crate::{
            io::PathMappingDataResolver,
            repository::{
                RepositoryPathVerification, RepositoryPathVerificationState, RepositoryRootReader,
                RepositoryWrite,
            },
            signing_key::{create_self_signed_key, signing_secret_key_params_builder},
        },
        async_trait::async_trait,
        futures::AsyncReadExt,
        std::borrow::Cow,
    };

    const BULLSEYE_URL: &str = "http://snapshot.debian.org/archive/debian/20211120T085721Z";

    #[derive(Default)]
    struct CapturingWriter {
        paths: std::sync::Mutex<HashMap<String, Vec<u8>>>,
    }

    impl CapturingWriter {
        fn get_path(&self, path: impl ToString) -> Option<Vec<u8>> {
            self.paths.lock().unwrap().get(&path.to_string()).cloned()
        }
    }

    #[async_trait]
    impl RepositoryWriter for CapturingWriter {
        async fn verify_path<'path>(
            &self,
            path: &'path str,
            _expected_content: Option<(u64, ContentDigest)>,
        ) -> Result<RepositoryPathVerification<'path>> {
            Ok(RepositoryPathVerification {
                path,
                state: RepositoryPathVerificationState::Missing,
            })
        }

        async fn write_path<'path, 'reader>(
            &self,
            path: Cow<'path, str>,
            reader: Pin<Box<dyn AsyncRead + Send + 'reader>>,
        ) -> Result<RepositoryWrite<'path>> {
            let mut writer = futures::io::Cursor::new(Vec::<u8>::new());

            let bytes_written = futures::io::copy(reader, &mut writer)
                .await
                .map_err(|e| DebianError::RepositoryIoPath(path.to_string(), e))?;

            self.paths
                .lock()
                .unwrap()
                .insert(path.to_string(), writer.into_inner());

            Ok(RepositoryWrite {
                path,
                bytes_written,
            })
        }
    }

    #[test]
    fn pool_layout_paths() {
        let layout = PoolLayout::ComponentThenNamePrefix;

        assert_eq!(
            layout.path("main", "python3.9", "python3.9_3.9.9-1_arm64.deb"),
            "pool/main/p/python3.9/python3.9_3.9.9-1_arm64.deb"
        );
        assert_eq!(
            layout.path("main", "libzstd", "zstd_1.4.8+dfsg-2.1_amd64.deb"),
            "pool/main/libz/libzstd/zstd_1.4.8+dfsg-2.1_amd64.deb"
        );
    }

    #[tokio::test]
    #[cfg(feature = "http")]
    async fn bullseye_binary_packages_reader() -> Result<()> {
        let root = HttpRepositoryClient::new(BULLSEYE_URL).unwrap();
        let release = root.release_reader("bullseye").await.unwrap();

        let packages = release
            .resolve_packages("main", "amd64", false)
            .await
            .unwrap();

        let mut builder = RepositoryBuilder::new_recommended(
            ["all", "amd64"].iter(),
            ["main"].iter(),
            "suite",
            "codename",
        );

        let mut mapping_resolver = PathMappingDataResolver::new(root);

        for package in packages
            .iter()
            .filter(|cf| {
                if let Some(Ok(size)) = cf.size() {
                    size < 1000000
                } else {
                    false
                }
            })
            .take(10)
        {
            let dest_filename = builder.add_binary_deb("main", package)?;

            let source_filename = package.field_str("Filename").unwrap();

            mapping_resolver.add_path_map(dest_filename, source_filename);
        }

        let pool_artifacts = builder
            .iter_binary_packages_pool_artifacts()
            .collect::<Result<Vec<_>>>()?;
        assert_eq!(pool_artifacts.len(), 10);

        let mut entries = builder.binary_packages_index_readers().collect::<Vec<_>>();
        assert_eq!(entries.len(), 6);
        assert!(entries
            .iter()
            .all(|entry| entry.canonical_path().starts_with("main/binary-")));

        for entry in entries.iter_mut() {
            let mut buf = vec![];
            entry.reader.read_to_end(&mut buf).await.unwrap();
        }

        let writer = CapturingWriter::default();

        let cb = |event| {
            eprintln!("{}", event);
        };

        let passwd_fn = String::new;
        let signed_secret_key = create_self_signed_key(
            signing_secret_key_params_builder("Me <someone@example.com>")
                .build()
                .unwrap(),
            passwd_fn,
        )
        .unwrap()
        .0;

        builder
            .publish(
                &writer,
                &mapping_resolver,
                "dists/mydist",
                10,
                &Some(cb),
                Some((&signed_secret_key, passwd_fn)),
            )
            .await?;

        let wanted_paths = ["dists/mydist/Release", "dists/mydist/InRelease"];

        assert!(wanted_paths.iter().all(|path| writer
            .paths
            .lock()
            .unwrap()
            .contains_key(&path.to_string())));

        let release = ReleaseFile::from_armored_reader(std::io::Cursor::new(
            writer.get_path("dists/mydist/InRelease").unwrap(),
        ))
        .unwrap();

        let signatures = release
            .signatures()
            .expect("PGP signatures should have been parsed");
        assert_eq!(
            signatures
                .iter_signatures_from_key(&signed_secret_key)
                .count(),
            1
        );

        signatures.verify(&signed_secret_key).unwrap();

        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use {
        super::*,
        crate::{
            repository::{filesystem::FilesystemRepositoryWriter, reader_from_str},
            signing_key::{create_self_signed_key, signing_secret_key_params_builder},
        },
        tempfile::TempDir,
    };

    fn temp_dir() -> Result<TempDir> {
        Ok(tempfile::Builder::new()
            .prefix("debian-packaging-test-")
            .tempdir()?)
    }

    #[tokio::test]
    async fn publish_empty() -> Result<()> {
        let td = temp_dir()?;

        let mut builder = RepositoryBuilder::new_recommended(
            ["amd64"].into_iter(),
            ["main"].into_iter(),
            "suite",
            "codename",
        );

        builder.set_description("description");
        builder.set_version("1");

        let writer = FilesystemRepositoryWriter::new(td.path());

        let key_params = signing_secret_key_params_builder("someone@example.com")
            .build()
            .unwrap();
        let key = create_self_signed_key(key_params, String::new)?.0;

        builder
            .publish_indices(
                &writer,
                Some("dists/dist"),
                1,
                &NO_PROGRESS_CB,
                Some((&key, String::new)),
            )
            .await?;

        let reader = reader_from_str(format!("file://{}", td.path().display()))?;

        let release_reader = reader.release_reader("dist").await?;

        let indices = release_reader.classified_indices_entries()?;
        assert!(indices.is_empty());

        Ok(())
    }
}