use {
crate::{
binary_package_control::BinaryPackageControlFile,
control::{ControlField, ControlParagraph},
deb::reader::resolve_control_file,
error::{DebianError, Result},
io::{read_compressed, ContentDigest, DataResolver, MultiContentDigest, MultiDigester},
repository::{
release::{ChecksumType, ReleaseFile, DATE_FORMAT},
Compression, PublishEvent, RepositoryPathVerificationState, RepositoryWriter,
},
},
chrono::{DateTime, Utc},
futures::{AsyncRead, AsyncReadExt, StreamExt, TryStreamExt},
pgp::{crypto::HashAlgorithm, types::SecretKeyTrait},
pgp_cleartext::cleartext_sign,
std::{
borrow::Cow,
collections::{BTreeMap, BTreeSet, HashMap},
pin::Pin,
},
};
/// Well-typed `None` to pass for the optional progress callback parameters below.
pub const NO_PROGRESS_CB: Option<fn(PublishEvent)> = None;
/// Well-typed `None` to pass for the optional (signing key, password callback) pair.
#[allow(clippy::type_complexity)]
pub const NO_SIGNING_KEY: Option<(&pgp::SignedSecretKey, fn() -> String)> = None;
/// Describes how `.deb` artifacts are laid out under the repository `pool/` tree.
#[derive(Clone, Copy, Debug)]
pub enum PoolLayout {
    /// Paths are `pool/<component>/<name_prefix>/<package>/<filename>`.
    ///
    /// The name prefix is the first character of the package name, or the
    /// first 4 characters for packages whose name starts with `lib`
    /// (the convention used by the official Debian archive).
    ComponentThenNamePrefix,
}

impl Default for PoolLayout {
    fn default() -> Self {
        Self::ComponentThenNamePrefix
    }
}

impl PoolLayout {
    /// Compute the pool path for a file.
    ///
    /// `component` is the repository component (e.g. `main`), `package` is the
    /// (source) package name used for grouping, and `filename` is the
    /// artifact's filename with no directory component.
    pub fn path(&self, component: &str, package: &str, filename: &str) -> String {
        match self {
            Self::ComponentThenNamePrefix => {
                // `lib*` packages group under their first 4 characters
                // (e.g. `libz`); everything else under the first character.
                let prefix_len = if package.starts_with("lib") { 4 } else { 1 };

                // Use `get` rather than direct slicing so a short name (e.g. a
                // package literally named "lib", or an empty name) or a
                // non-char-boundary cut cannot panic; fall back to the whole
                // package name in that degenerate case.
                let name_prefix = package.get(0..prefix_len).unwrap_or(package);

                format!("pool/{}/{}/{}/{}", component, name_prefix, package, filename)
            }
        }
    }
}
/// Describes a reference to a `.deb` Debian package existing somewhere.
///
/// Implementations supply the metadata needed to emit a `Packages` index
/// entry for the package without prescribing where its content lives.
pub trait DebPackageReference<'cf> {
/// Size of the `.deb` file, in bytes.
fn deb_size_bytes(&self) -> Result<u64>;
/// Digest of the `.deb` file content using the given checksum flavor.
fn deb_digest(&self, checksum: ChecksumType) -> Result<ContentDigest>;
/// Filename of the `.deb` file (no directory component).
fn deb_filename(&self) -> Result<String>;
/// The control paragraph to place in the `Packages` index for this package.
fn control_file_for_packages_index(&self) -> Result<BinaryPackageControlFile<'cf>>;
}
/// A `.deb` package whose content is held entirely in memory.
pub struct InMemoryDebFile {
/// Filename to advertise for the package (no directory component).
filename: String,
/// Raw bytes of the `.deb` archive.
data: Vec<u8>,
}
impl InMemoryDebFile {
pub fn new(filename: String, data: Vec<u8>) -> Self {
Self { filename, data }
}
}
impl<'cf> DebPackageReference<'cf> for InMemoryDebFile {
    fn deb_size_bytes(&self) -> Result<u64> {
        // The whole archive is in memory, so the size is just the buffer length.
        Ok(self.data.len() as u64)
    }

    fn deb_digest(&self, checksum: ChecksumType) -> Result<ContentDigest> {
        // Hash the in-memory payload with the requested algorithm.
        let mut hasher = checksum.new_hasher();
        hasher.update(&self.data);
        let bytes = hasher.finish().to_vec();

        // Wrap the raw digest in the matching ContentDigest variant.
        let digest = match checksum {
            ChecksumType::Md5 => ContentDigest::Md5(bytes),
            ChecksumType::Sha1 => ContentDigest::Sha1(bytes),
            ChecksumType::Sha256 => ContentDigest::Sha256(bytes),
        };

        Ok(digest)
    }

    fn deb_filename(&self) -> Result<String> {
        Ok(self.filename.clone())
    }

    fn control_file_for_packages_index(&self) -> Result<BinaryPackageControlFile<'cf>> {
        // Parse the control file straight out of the in-memory archive bytes.
        resolve_control_file(std::io::Cursor::new(&self.data))
    }
}
/// A readable index file destined for a published repository, plus its metadata.
pub struct IndexFileReader<'a> {
/// Produces the (possibly compressed) content of the index file.
pub reader: Pin<Box<dyn AsyncRead + Send + 'a>>,
/// Compression applied to the stream `reader` yields.
pub compression: Compression,
/// Directory, relative to the distribution root, holding the file.
pub directory: String,
/// Base filename, without the compression extension.
pub filename: String,
}
impl<'a> IndexFileReader<'a> {
    /// The canonical path of this index file relative to the distribution root:
    /// `<directory>/<filename><compression extension>`.
    pub fn canonical_path(&self) -> String {
        let extension = self.compression.extension();
        format!("{}/{}{}", self.directory, self.filename, extension)
    }

    /// The `by-hash` path for this index file, keyed on the given content
    /// digest: `<directory>/by-hash/<digest field name>/<hex digest>`.
    pub fn by_hash_path(&self, digest: &ContentDigest) -> String {
        let field = digest.release_field_name();
        let hex = digest.digest_hex();
        format!("{}/by-hash/{}/{}", self.directory, field, hex)
    }
}
/// An index file fully read into memory, expanded to a concrete write location.
struct ExpandedIndexFile {
/// Canonical (non-by-hash) path of the index file.
canonical_path: String,
/// Path the content will actually be written to (may be a by-hash path).
write_path: String,
/// Digests of `data` under every supported checksum.
digests: MultiContentDigest,
/// Raw (possibly compressed) file content.
data: Vec<u8>,
}
/// Describes a `.deb` artifact that belongs in the repository `pool/`.
#[derive(Debug)]
pub struct BinaryPackagePoolArtifact<'a> {
/// Pool-relative path of the artifact (the `Filename` control field).
pub path: &'a str,
/// Size of the artifact, in bytes.
pub size: u64,
/// Strongest known content digest of the artifact.
pub digest: ContentDigest,
}
/// Binary package control paragraphs keyed by (package name, version).
type IndexedBinaryPackages<'a> = BTreeMap<(String, String), ControlParagraph<'a>>;
/// Per-(component, architecture) collections of indexed binary packages.
type ComponentBinaryPackages<'a> = BTreeMap<(String, String), IndexedBinaryPackages<'a>>;
/// Build a Debian package repository from its constituent packages and metadata.
///
/// Collects packages and `Release`-level metadata, then publishes pool
/// artifacts and index files via the `publish*` methods.
#[derive(Debug, Default)]
pub struct RepositoryBuilder<'cf> {
/// Architectures advertised in the `Release` file.
architectures: BTreeSet<String>,
/// Components advertised in the `Release` file (e.g. `main`).
components: BTreeSet<String>,
/// `Suite` field value, if set.
suite: Option<String>,
/// `Codename` field value, if set.
codename: Option<String>,
/// `Date` field value, if set.
date: Option<DateTime<Utc>>,
/// `Valid-Until` field value, if set.
valid_until: Option<DateTime<Utc>>,
/// `Description` field value, if set.
description: Option<String>,
/// `Origin` field value, if set.
origin: Option<String>,
/// `Label` field value, if set.
label: Option<String>,
/// `Version` field value, if set.
version: Option<String>,
/// Whether to advertise and emit `by-hash` index paths.
acquire_by_hash: Option<bool>,
/// Checksum flavors recorded for packages and indices (BTreeSet order:
/// the last element is treated as the strongest).
checksums: BTreeSet<ChecksumType>,
/// Layout of the `pool/` tree; immutable once packages are added.
pool_layout: PoolLayout,
/// Compression formats each index file is emitted in.
index_file_compressions: BTreeSet<Compression>,
/// Registered binary packages, keyed by (component, architecture).
binary_packages: ComponentBinaryPackages<'cf>,
/// Registered installer (udeb) packages; currently never populated here.
installer_packages: ComponentBinaryPackages<'cf>,
/// Registered source packages; currently never populated here.
source_packages: BTreeMap<String, IndexedBinaryPackages<'cf>>,
/// Registered translations; currently never populated here.
translations: BTreeMap<String, ()>,
}
impl<'cf> RepositoryBuilder<'cf> {
pub fn new_recommended_empty() -> Self {
Self {
architectures: BTreeSet::new(),
components: BTreeSet::new(),
suite: None,
codename: None,
date: Some(Utc::now()),
valid_until: None,
description: None,
origin: None,
label: None,
version: None,
acquire_by_hash: Some(true),
checksums: BTreeSet::from_iter([ChecksumType::Md5, ChecksumType::Sha256]),
pool_layout: PoolLayout::default(),
index_file_compressions: BTreeSet::from_iter([
Compression::None,
Compression::Gzip,
Compression::Xz,
]),
binary_packages: ComponentBinaryPackages::default(),
installer_packages: ComponentBinaryPackages::default(),
source_packages: BTreeMap::default(),
translations: BTreeMap::default(),
}
}
/// Create a new builder with recommended settings plus the given
/// architectures, components, suite, and codename.
pub fn new_recommended(
    architectures: impl Iterator<Item = impl ToString>,
    components: impl Iterator<Item = impl ToString>,
    suite: impl ToString,
    codename: impl ToString,
) -> Self {
    let mut builder = Self::new_recommended_empty();
    builder.architectures = architectures.map(|x| x.to_string()).collect();
    builder.components = components.map(|x| x.to_string()).collect();
    builder.suite = Some(suite.to_string());
    builder.codename = Some(codename.to_string());
    builder
}
/// Register an additional architecture (e.g. `amd64`) with the repository.
pub fn add_architecture(&mut self, arch: impl ToString) {
self.architectures.insert(arch.to_string());
}
/// Register an additional component (e.g. `main`) with the repository.
pub fn add_component(&mut self, name: impl ToString) {
self.components.insert(name.to_string());
}
/// Register an additional checksum flavor to record for packages and indices.
pub fn add_checksum(&mut self, value: ChecksumType) {
self.checksums.insert(value);
}
/// Set the `Suite` field of the `Release` file.
pub fn set_suite(&mut self, value: impl ToString) {
self.suite = Some(value.to_string());
}
/// Set the `Codename` field of the `Release` file.
pub fn set_codename(&mut self, value: impl ToString) {
self.codename = Some(value.to_string());
}
/// Set the `Date` field of the `Release` file.
pub fn set_date(&mut self, value: DateTime<Utc>) {
self.date = Some(value);
}
/// Set the `Valid-Until` field of the `Release` file.
pub fn set_valid_until(&mut self, value: DateTime<Utc>) {
self.valid_until = Some(value);
}
/// Set the `Description` field of the `Release` file.
pub fn set_description(&mut self, value: impl ToString) {
self.description = Some(value.to_string());
}
/// Set the `Origin` field of the `Release` file.
pub fn set_origin(&mut self, value: impl ToString) {
self.origin = Some(value.to_string());
}
/// Set the `Label` field of the `Release` file.
pub fn set_label(&mut self, value: impl ToString) {
self.label = Some(value.to_string());
}
/// Set the `Version` field of the `Release` file.
pub fn set_version(&mut self, value: impl ToString) {
self.version = Some(value.to_string());
}
/// Set the `Acquire-By-Hash` field and whether `by-hash` paths are emitted.
pub fn set_acquire_by_hash(&mut self, value: bool) {
self.acquire_by_hash = Some(value);
}
/// Set the layout of the `pool/` tree.
///
/// Errors if any packages have already been registered, because existing
/// entries already recorded paths computed with the previous layout.
pub fn set_pool_layout(&mut self, layout: PoolLayout) -> Result<()> {
    if self.have_entries() {
        return Err(DebianError::RepositoryBuildPoolLayoutImmutable);
    }

    self.pool_layout = layout;
    Ok(())
}
/// Whether any package or translation entries have been registered yet.
fn have_entries(&self) -> bool {
    // Equivalent to "any of the collections is non-empty".
    !(self.binary_packages.is_empty()
        && self.source_packages.is_empty()
        && self.installer_packages.is_empty()
        && self.translations.is_empty())
}
/// Add a binary `.deb` package to the repository under the given component.
///
/// Derives the package's `Packages` index paragraph from its control file,
/// filling in `Filename`, `Size`, checksum fields, and `Description-md5`.
/// Returns the pool-relative path where the `.deb` must be published.
///
/// Errors if `component` or the package's architecture was not registered
/// with the builder, or if the control file cannot be resolved.
pub fn add_binary_deb(
&mut self,
component: &str,
deb: &impl DebPackageReference<'cf>,
) -> Result<String> {
if !self.components.contains(component) {
return Err(DebianError::RepositoryBuildUnknownComponent(
component.to_string(),
));
}
let original_control_file = deb.control_file_for_packages_index()?;
let package = original_control_file.package()?;
let version = original_control_file.version_str()?;
let arch = original_control_file.architecture()?;
if !self.architectures.contains(arch) {
return Err(DebianError::RepositoryBuildUnknownArchitecture(
arch.to_string(),
));
}
// Copy all control fields except the ones we derive ourselves below.
let mut para = ControlParagraph::default();
for field in original_control_file.iter_fields() {
if ![
"Description",
"Filename",
"Size",
"MD5sum",
"SHA1",
"SHA256",
]
.contains(&field.name())
{
para.set_field(field.clone());
}
}
if let Some(description) = original_control_file.field("Description") {
let description = description.value_str();
if let Some(index) = description.find('\n') {
// Multi-line description: the index keeps only the first (short)
// line and records Description-md5 as the MD5 of the full
// description plus a trailing newline, per the Debian spec.
let mut h = ChecksumType::Md5.new_hasher();
h.update(description.as_bytes());
h.update(b"\n");
let digest = h.finish();
para.set_field_from_string(
"Description".into(),
(&description[0..index]).to_string().into(),
);
para.set_field_from_string("Description-md5".into(), hex::encode(digest).into());
} else {
// Single-line description is carried over as-is (no md5 field).
para.set_field_from_string("Description".into(), description.to_string().into());
}
}
// Pool paths group by source package name when one is declared,
// otherwise by the binary package name.
let filename = self.pool_layout.path(
component,
if let Some(name) = original_control_file.source() {
name
} else {
package
},
&deb.deb_filename()?,
);
para.set_field_from_string("Filename".into(), filename.clone().into());
para.set_field_from_string("Size".into(), format!("{}", deb.deb_size_bytes()?).into());
// Record one digest field per configured checksum flavor.
for checksum in &self.checksums {
let digest = deb.deb_digest(*checksum)?;
para.set_field_from_string(checksum.field_name().into(), digest.digest_hex().into());
}
let component_key = (component.to_string(), arch.to_string());
let package_key = (package.to_string(), version.to_string());
self.binary_packages
.entry(component_key)
.or_default()
.insert(package_key, para);
Ok(filename)
}
/// Iterate the (component, architecture) pairs that have binary packages.
pub fn binary_package_components(&self) -> impl Iterator<Item = (&str, &str)> + '_ {
    self.binary_packages
        .keys()
        .map(|(component, arch)| (component.as_str(), arch.as_str()))
}
/// Iterate the control paragraphs of binary packages registered under the
/// given component and architecture.
///
/// Yields nothing if the (component, architecture) pair has no packages.
pub fn iter_component_binary_packages(
    &self,
    component: impl ToString,
    architecture: impl ToString,
) -> Box<dyn Iterator<Item = &'_ ControlParagraph> + Send + '_> {
    let key = (component.to_string(), architecture.to_string());

    match self.binary_packages.get(&key) {
        Some(packages) => Box::new(packages.values()),
        None => Box::new(std::iter::empty()),
    }
}
/// Iterate pool artifact descriptions for binary packages under the given
/// component and architecture.
///
/// The `expect`s rely on invariants established by [`Self::add_binary_deb`],
/// which always populates `Filename`, `Size`, and every configured checksum
/// field before inserting a paragraph.
pub fn iter_component_binary_package_pool_artifacts(
&self,
component: impl ToString,
architecture: impl ToString,
) -> impl Iterator<Item = Result<BinaryPackagePoolArtifact<'_>>> + '_ {
self.iter_component_binary_packages(component, architecture)
.map(|para| {
let path = para
.field_str("Filename")
.expect("Filename should have been populated at package add time");
let size = para
.field_u64("Size")
.expect("Size should have been populated at package add time")
.expect("Size should parse to an integer");
// BTreeSet iteration order makes the last checksum the strongest.
let strongest_checksum = self
.checksums
.iter()
.last()
.expect("should have at least 1 checksum defined");
let digest_hex = para
.field_str(strongest_checksum.field_name())
.expect("checksum's field should have been set");
let digest = ContentDigest::from_hex_digest(*strongest_checksum, digest_hex)?;
Ok(BinaryPackagePoolArtifact { path, size, digest })
})
}
/// Obtain a reader for the uncompressed `Packages` index content of the
/// given component and architecture.
///
/// The content is the concatenation of each registered package's control
/// paragraph, each followed by a blank-line separator.
pub fn component_binary_packages_reader(
    &self,
    component: impl ToString,
    architecture: impl ToString,
) -> impl AsyncRead + '_ {
    futures::stream::iter(
        self.iter_component_binary_packages(component, architecture)
            // `p` implements Display; formatting it directly avoids the
            // redundant intermediate String from `p.to_string()`
            // (clippy::to_string_in_format_args).
            .map(|p| Ok(format!("{}\n", p))),
    )
    .into_async_read()
}
/// Like [`Self::component_binary_packages_reader`], but the content is
/// passed through the given compression format.
pub fn component_binary_packages_reader_compression(
    &self,
    component: impl ToString,
    architecture: impl ToString,
    compression: Compression,
) -> Pin<Box<dyn AsyncRead + Send + '_>> {
    let packages =
        self.component_binary_packages_reader(component.to_string(), architecture.to_string());

    // Buffer the source reader before feeding it to the compressor.
    read_compressed(futures::io::BufReader::new(packages), compression)
}
/// Obtain a reader for every `Packages` index file to be written: one per
/// (component, architecture, compression) combination.
pub fn binary_packages_index_readers(&self) -> impl Iterator<Item = IndexFileReader<'_>> + '_ {
self.binary_packages
.keys()
.flat_map(move |(component, architecture)| {
// Each (component, architecture) index is emitted once per
// configured compression format.
self.index_file_compressions
.iter()
.map(move |compression| IndexFileReader {
reader: self.component_binary_packages_reader_compression(
component,
architecture,
*compression,
),
compression: *compression,
directory: format!("{}/binary-{}", component, architecture),
filename: "Packages".to_string(),
})
})
}
/// Obtain readers for all index files this builder will publish.
///
/// Currently only binary package `Packages` indices are produced; sources,
/// installer packages, and translations are not yet emitted.
pub fn index_file_readers(&self) -> impl Iterator<Item = IndexFileReader<'_>> + '_ {
self.binary_packages_index_readers()
}
/// Iterate pool artifact descriptions for all registered binary packages,
/// across every (component, architecture) pair.
pub fn iter_binary_packages_pool_artifacts(
&self,
) -> impl Iterator<Item = Result<BinaryPackagePoolArtifact<'_>>> + '_ {
self.binary_packages
.keys()
.flat_map(move |(component, architecture)| {
self.iter_component_binary_package_pool_artifacts(component, architecture)
})
}
/// Publish all pool artifacts (the `.deb` files) to `writer`.
///
/// First verifies which artifacts already exist at the destination (up to
/// `threads` verifications in flight), then copies only the missing or
/// mismatched ones from `resolver`. `progress_cb`, when provided, receives
/// a [`PublishEvent`] for each verification and copy.
pub async fn publish_pool_artifacts<F>(
&self,
resolver: &impl DataResolver,
writer: &impl RepositoryWriter,
threads: usize,
progress_cb: &Option<F>,
) -> Result<()>
where
F: Fn(PublishEvent),
{
let artifacts = self
.iter_binary_packages_pool_artifacts()
.collect::<Result<Vec<_>>>()?;
if let Some(ref cb) = progress_cb {
cb(PublishEvent::ResolvedPoolArtifacts(artifacts.len()));
}
// Phase 1: concurrently verify which destination paths already hold
// content with the expected size and digest.
let mut fs = futures::stream::iter(
artifacts
.iter()
.map(|a| writer.verify_path(a.path, Some((a.size, a.digest.clone())))),
)
.buffer_unordered(threads);
let mut missing_paths = BTreeSet::new();
while let Some(result) = fs.next().await {
let result = result?;
match result.state {
RepositoryPathVerificationState::ExistsNoIntegrityCheck
| RepositoryPathVerificationState::ExistsIntegrityVerified => {
if let Some(ref cb) = progress_cb {
cb(PublishEvent::PoolArtifactCurrent(result.path.to_string()));
}
}
RepositoryPathVerificationState::ExistsIntegrityMismatch
| RepositoryPathVerificationState::Missing => {
// Mismatched content is treated the same as missing: re-copy it.
if let Some(ref cb) = progress_cb {
cb(PublishEvent::PoolArtifactMissing(result.path.to_string()));
}
missing_paths.insert(result.path);
}
}
}
if let Some(ref cb) = progress_cb {
cb(PublishEvent::PoolArtifactsToPublish(missing_paths.len()));
}
// Phase 2: concurrently fetch and write only the artifacts that were
// found to be missing or mismatched above.
let mut fs = futures::stream::iter(
artifacts
.iter()
.filter(|a| missing_paths.contains(a.path))
.map(|a| get_path_and_copy(resolver, writer, a)),
)
.buffer_unordered(threads);
while let Some(artifact) = fs.next().await {
let artifact = artifact?;
if let Some(ref cb) = progress_cb {
cb(PublishEvent::PoolArtifactCreated(
artifact.path.to_string(),
artifact.size,
));
}
}
Ok(())
}
/// Read an index file fully into memory and expand it to the concrete
/// locations it must be written to.
///
/// With `Acquire-By-Hash` enabled, one [`ExpandedIndexFile`] per configured
/// checksum is produced, each targeting a `by-hash/` path; otherwise a
/// single entry targeting the canonical path is produced.
async fn expand_index_file_reader<'ifr, 'slf: 'ifr>(
&'slf self,
mut ifr: IndexFileReader<'ifr>,
) -> Result<Box<dyn Iterator<Item = ExpandedIndexFile> + 'ifr>> {
let mut buf = vec![];
ifr.reader.read_to_end(&mut buf).await?;
// Digest the content under every supported checksum in one pass.
let mut digester = MultiDigester::default();
digester.update(&buf);
let digests = digester.finish();
if self.acquire_by_hash == Some(true) {
// One copy per checksum flavor, each written under its by-hash path.
Ok(Box::new(self.checksums.iter().map(move |checksum| {
ExpandedIndexFile {
canonical_path: ifr.canonical_path(),
write_path: ifr.by_hash_path(digests.digest_from_checksum(*checksum)),
digests: digests.clone(),
data: buf.clone(),
}
})))
} else {
// Without by-hash, the write path is simply the canonical path.
Ok(Box::new(std::iter::once(ExpandedIndexFile {
canonical_path: ifr.canonical_path(),
write_path: ifr.canonical_path(),
digests,
data: buf,
})))
}
}
/// Derive the non-checksum fields of the `Release` file from builder state.
///
/// Fields are collected into a BTreeMap, so they are emitted in
/// alphabetical field-name order.
fn static_release_fields(&self) -> impl Iterator<Item = ControlField<'_>> {
let mut fields: BTreeMap<Cow<'_, str>, Cow<'_, str>> = BTreeMap::new();
// Components and Architectures are space-delimited lists.
fields.insert(
"Components".into(),
self.components
.iter()
.map(|x| x.as_str())
.collect::<Vec<_>>()
.join(" ")
.into(),
);
fields.insert(
"Architectures".into(),
self.architectures
.iter()
.map(|x| x.as_str())
.collect::<Vec<_>>()
.join(" ")
.into(),
);
// The remaining fields are optional and only emitted when set.
if let Some(suite) = &self.suite {
fields.insert("Suite".into(), suite.into());
}
if let Some(codename) = &self.codename {
fields.insert("Codename".into(), codename.into());
}
if let Some(date) = &self.date {
fields.insert(
"Date".into(),
format!("{}", date.format(DATE_FORMAT)).into(),
);
}
if let Some(valid_until) = &self.valid_until {
fields.insert(
"Valid-Until".into(),
format!("{}", valid_until.format(DATE_FORMAT)).into(),
);
}
if let Some(description) = &self.description {
fields.insert("Description".into(), description.into());
}
if let Some(origin) = &self.origin {
fields.insert("Origin".into(), origin.into());
}
if let Some(label) = &self.label {
fields.insert("Label".into(), label.into());
}
if let Some(version) = &self.version {
fields.insert("Version".into(), version.into());
}
if let Some(acquire_by_hash) = self.acquire_by_hash {
fields.insert(
"Acquire-By-Hash".into(),
if acquire_by_hash { "yes" } else { "no" }.into(),
);
}
fields.into_iter().map(|(k, v)| ControlField::new(k, v))
}
/// Create the `Release` file paragraph from builder state and the digests
/// of the published index files.
///
/// `indices` maps each index's canonical path to its size and digests. One
/// checksum field (e.g. `SHA256`) is emitted per configured checksum, with
/// one ` <digest> <size> <path>`-style line per index, columns aligned.
pub fn create_release_file(
&self,
indices: impl Iterator<Item = (String, (u64, MultiContentDigest))>,
) -> Result<ReleaseFile<'_>> {
let mut para = ControlParagraph::default();
for field in self.static_release_fields() {
para.set_field(field);
}
// Regroup per-index digests by release field name (Md5Sum, SHA256, ...).
let mut digests_by_field = HashMap::new();
for (path, (size, digests)) in indices {
for digest in digests.iter_digests() {
digests_by_field
.entry(digest.release_field_name())
.or_insert_with(BTreeMap::new)
.insert(path.clone(), (size, digest.digest_hex()));
}
}
for checksum in self.checksums.iter() {
let default = BTreeMap::new();
let entries = digests_by_field
.get(checksum.field_name())
.unwrap_or(&default);
// Column widths for aligned output.
let longest_path = entries.keys().map(|x| x.len()).max().unwrap_or_default();
let longest_size = entries
.values()
.map(|(size, _)| format!("{}", size).len())
.max()
.unwrap_or_default();
// The leading empty string yields the newline after the field name,
// so every entry lives on its own continuation line.
para.set_field(ControlField::new(
checksum.field_name().into(),
std::iter::once("".to_string())
.chain(entries.iter().map(|(path, (size, digest))| {
format!(
" {:<path_width$} {:>size_width$} {}",
path,
size,
digest,
path_width = longest_path,
size_width = longest_size
)
}))
.collect::<Vec<_>>()
.join("\n")
.into(),
));
}
Ok(para.into())
}
/// Publish all index files plus `Release` (and, when a signing key is
/// given, a cleartext-signed `InRelease`) to `writer`.
///
/// `path_prefix`, when provided, is prepended (with surrounding slashes
/// trimmed) to every written path — typically `dists/<distribution>`.
/// Up to `threads` expansions/writes run concurrently. `progress_cb`
/// receives a [`PublishEvent`] for each file queued and written.
pub async fn publish_indices<F, PW>(
&self,
writer: &impl RepositoryWriter,
path_prefix: Option<&str>,
threads: usize,
progress_cb: &Option<F>,
signing_key: Option<(&impl SecretKeyTrait, PW)>,
) -> Result<()>
where
F: Fn(PublishEvent),
PW: FnOnce() -> String,
{
// Canonical path -> (size, digests), later fed to create_release_file().
let mut index_paths = BTreeMap::new();
let mut fs = futures::stream::iter(
self.index_file_readers()
.map(|ifr| self.expand_index_file_reader(ifr)),
)
.buffer_unordered(threads);
let mut iters = vec![];
while let Some(res) = fs.try_next().await? {
for mut eif in res {
if let Some(prefix) = path_prefix {
eif.write_path = format!("{}/{}", prefix.trim_matches('/'), eif.write_path);
}
if let Some(cb) = progress_cb {
cb(PublishEvent::IndexFileToWrite(eif.write_path.clone()));
}
// Release records the canonical path even when the bytes land
// at a by-hash write path.
index_paths.insert(
eif.canonical_path.clone(),
(eif.data.len() as u64, eif.digests.clone()),
);
iters.push(eif);
}
}
// Write all expanded index files concurrently.
let mut fs = futures::stream::iter(iters.into_iter().map(|eif| {
writer.write_path(
eif.write_path.into(),
Box::pin(futures::io::Cursor::new(eif.data)),
)
}))
.buffer_unordered(threads);
while let Some(write) = fs.try_next().await? {
if let Some(cb) = progress_cb {
cb(PublishEvent::IndexFileWritten(
write.path.to_string(),
write.bytes_written,
));
}
}
let release = self.create_release_file(index_paths.into_iter())?;
let (release_path, inrelease_path) = if let Some(prefix) = path_prefix {
(
format!("{}/Release", prefix.trim_matches('/')),
format!("{}/InRelease", prefix.trim_matches('/')),
)
} else {
("Release".to_string(), "InRelease".to_string())
};
if let Some(cb) = progress_cb {
cb(PublishEvent::IndexFileToWrite(release_path.clone()))
}
let release_write = writer
.write_path(
release_path.into(),
Box::pin(futures::io::Cursor::new(release.to_string().into_bytes())),
)
.await?;
if let Some(cb) = progress_cb {
cb(PublishEvent::IndexFileWritten(
release_write.path.to_string(),
release_write.bytes_written,
));
}
// InRelease is the cleartext-signed (SHA-256) form of Release; it is
// only written when a signing key was supplied.
if let Some((key, password)) = signing_key {
let inrelease_content = cleartext_sign(
key,
password,
HashAlgorithm::SHA2_256,
std::io::Cursor::new(release.to_string().as_bytes()),
)?;
if let Some(cb) = progress_cb {
cb(PublishEvent::IndexFileToWrite(inrelease_path.clone()));
}
let inrelease_write = writer
.write_path(
inrelease_path.into(),
Box::pin(futures::io::Cursor::new(inrelease_content.into_bytes())),
)
.await?;
if let Some(cb) = progress_cb {
cb(PublishEvent::IndexFileWritten(
inrelease_write.path.to_string(),
inrelease_write.bytes_written,
));
}
}
Ok(())
}
/// Publish the full repository: pool artifacts first, then index files.
///
/// Pool artifacts are fetched from `resolver` and written via `writer`;
/// indices are written under `distribution_path` (e.g. `dists/stable`).
/// See [`Self::publish_pool_artifacts`] and [`Self::publish_indices`] for
/// the semantics of `threads`, `progress_cb`, and `signing_key`.
pub async fn publish<F, PW>(
&self,
writer: &impl RepositoryWriter,
resolver: &impl DataResolver,
distribution_path: &str,
threads: usize,
progress_cb: &Option<F>,
signing_key: Option<(&impl SecretKeyTrait, PW)>,
) -> Result<()>
where
F: Fn(PublishEvent),
PW: FnOnce() -> String,
{
// Pool content must exist before indices referencing it are published.
self.publish_pool_artifacts(resolver, writer, threads, progress_cb)
.await?;
self.publish_indices(
writer,
Some(distribution_path),
threads,
progress_cb,
signing_key,
)
.await?;
Ok(())
}
}
/// Fetch a pool artifact from `resolver` (verifying size + digest) and copy
/// it to `writer` at the artifact's pool path.
///
/// Returns the artifact reference back so callers can report progress.
async fn get_path_and_copy<'a, 'b>(
    resolver: &impl DataResolver,
    writer: &impl RepositoryWriter,
    artifact: &'a BinaryPackagePoolArtifact<'b>,
) -> Result<&'a BinaryPackagePoolArtifact<'b>> {
    // The resolver verifies the content against the recorded size/digest
    // while reading, so corrupt sources fail before anything is written.
    let content = resolver
        .get_path_with_digest_verification(artifact.path, artifact.size, artifact.digest.clone())
        .await?;

    writer.write_path(artifact.path.into(), content).await?;

    Ok(artifact)
}
#[cfg(test)]
mod test {
use {
super::*,
crate::{
io::PathMappingDataResolver,
repository::{
http::HttpRepositoryClient, RepositoryPathVerification,
RepositoryPathVerificationState, RepositoryRootReader, RepositoryWrite,
},
signing_key::{create_self_signed_key, signing_secret_key_params_builder},
},
async_trait::async_trait,
futures::AsyncReadExt,
std::borrow::Cow,
};
/// Fixed Debian snapshot used so index content is stable across test runs.
const BULLSEYE_URL: &str = "http://snapshot.debian.org/archive/debian/20211120T085721Z";
/// A RepositoryWriter that captures written content in memory for assertions.
#[derive(Default)]
struct CapturingWriter {
/// Map of written path -> written bytes, guarded for the async writer.
paths: std::sync::Mutex<HashMap<String, Vec<u8>>>,
}
impl CapturingWriter {
/// Return a copy of the bytes written to `path`, if any.
fn get_path(&self, path: impl ToString) -> Option<Vec<u8>> {
self.paths.lock().unwrap().get(&path.to_string()).cloned()
}
}
#[async_trait]
impl RepositoryWriter for CapturingWriter {
/// Always reports paths as missing so every artifact gets (re)written.
async fn verify_path<'path>(
&self,
path: &'path str,
_expected_content: Option<(u64, ContentDigest)>,
) -> Result<RepositoryPathVerification<'path>> {
Ok(RepositoryPathVerification {
path,
state: RepositoryPathVerificationState::Missing,
})
}
/// Drains `reader` into memory and records the bytes under `path`.
async fn write_path<'path, 'reader>(
&self,
path: Cow<'path, str>,
reader: Pin<Box<dyn AsyncRead + Send + 'reader>>,
) -> Result<RepositoryWrite<'path>> {
let mut writer = futures::io::Cursor::new(Vec::<u8>::new());
let bytes_written = futures::io::copy(reader, &mut writer)
.await
.map_err(|e| DebianError::RepositoryIoPath(path.to_string(), e))?;
self.paths
.lock()
.unwrap()
.insert(path.to_string(), writer.into_inner());
Ok(RepositoryWrite {
path,
bytes_written,
})
}
}
/// Pool path derivation: plain packages group by first character, `lib*`
/// packages by their first 4 characters.
#[test]
fn pool_layout_paths() {
let layout = PoolLayout::ComponentThenNamePrefix;
assert_eq!(
layout.path("main", "python3.9", "python3.9_3.9.9-1_arm64.deb"),
"pool/main/p/python3.9/python3.9_3.9.9-1_arm64.deb"
);
assert_eq!(
layout.path("main", "libzstd", "zstd_1.4.8+dfsg-2.1_amd64.deb"),
"pool/main/libz/libzstd/zstd_1.4.8+dfsg-2.1_amd64.deb"
);
}
/// End-to-end publish test against a pinned Debian bullseye snapshot.
///
/// NOTE(review): requires network access to snapshot.debian.org.
#[tokio::test]
async fn bullseye_binary_packages_reader() -> Result<()> {
let root = HttpRepositoryClient::new(BULLSEYE_URL).unwrap();
let release = root.release_reader("bullseye").await.unwrap();
let packages = release
.resolve_packages("main", "amd64", false)
.await
.unwrap();
let mut builder = RepositoryBuilder::new_recommended(
["all", "amd64"].iter(),
["main"].iter(),
"suite",
"codename",
);
// Maps builder pool paths back to upstream paths so the resolver can
// fetch content for artifacts the builder re-pathed.
let mut mapping_resolver = PathMappingDataResolver::new(root);
// Keep the test fast: only 10 packages, each under 1 MB.
for package in packages
.iter()
.filter(|cf| {
if let Some(Ok(size)) = cf.size() {
size < 1000000
} else {
false
}
})
.take(10)
{
let dest_filename = builder.add_binary_deb("main", package)?;
let source_filename = package.field_str("Filename").unwrap();
mapping_resolver.add_path_map(dest_filename, source_filename);
}
let pool_artifacts = builder
.iter_binary_packages_pool_artifacts()
.collect::<Result<Vec<_>>>()?;
assert_eq!(pool_artifacts.len(), 10);
// 2 architectures x 1 component x 3 compressions = 6 index readers.
let mut entries = builder.binary_packages_index_readers().collect::<Vec<_>>();
assert_eq!(entries.len(), 6);
assert!(entries
.iter()
.all(|entry| entry.canonical_path().starts_with("main/binary-")));
// All index readers should be drainable without error.
for entry in entries.iter_mut() {
let mut buf = vec![];
entry.reader.read_to_end(&mut buf).await.unwrap();
}
let writer = CapturingWriter::default();
let cb = |event| {
eprintln!("{}", event);
};
let passwd_fn = String::new;
let signed_secret_key = create_self_signed_key(
signing_secret_key_params_builder("Me <someone@example.com>")
.build()
.unwrap(),
passwd_fn,
)
.unwrap()
.0;
builder
.publish(
&writer,
&mapping_resolver,
"dists/mydist",
10,
&Some(cb),
Some((&signed_secret_key, passwd_fn)),
)
.await?;
let wanted_paths = vec!["dists/mydist/Release", "dists/mydist/InRelease"];
assert!(wanted_paths.iter().all(|path| writer
.paths
.lock()
.unwrap()
.contains_key(&path.to_string())));
// InRelease must parse as a cleartext-signed Release verifiable with
// the key that produced it.
let release = ReleaseFile::from_armored_reader(std::io::Cursor::new(
writer.get_path("dists/mydist/InRelease").unwrap(),
))
.unwrap();
let signatures = release
.signatures()
.expect("PGP signatures should have been parsed");
assert_eq!(
signatures
.iter_signatures_from_key(&signed_secret_key)
.count(),
1
);
signatures.verify(&signed_secret_key).unwrap();
Ok(())
}
}
#[cfg(test)]
mod tests {
use {
super::*,
crate::{
repository::{filesystem::FilesystemRepositoryWriter, reader_from_str},
signing_key::{create_self_signed_key, signing_secret_key_params_builder},
},
tempfile::TempDir,
};
/// Create a uniquely-named temporary directory for a test run.
fn temp_dir() -> Result<TempDir> {
    let dir = tempfile::Builder::new()
        .prefix("debian-packaging-test-")
        .tempdir()?;

    Ok(dir)
}
/// Publishing a builder with no packages should still produce a signed,
/// readable distribution whose index list is empty.
#[tokio::test]
async fn publish_empty() -> Result<()> {
let td = temp_dir()?;
let mut builder = RepositoryBuilder::new_recommended(
["amd64"].into_iter(),
["main"].into_iter(),
"suite",
"codename",
);
builder.set_description("description");
builder.set_version("1");
let writer = FilesystemRepositoryWriter::new(td.path());
let key_params = signing_secret_key_params_builder("someone@example.com")
.build()
.unwrap();
let key = create_self_signed_key(key_params, String::new)?.0;
builder
.publish_indices(
&writer,
Some("dists/dist"),
1,
&NO_PROGRESS_CB,
Some((&key, String::new)),
)
.await?;
// Read the published repository back through a file:// reader.
let reader = reader_from_str(format!("file://{}", td.path().display()))?;
let release_reader = reader.release_reader("dist").await?;
let indices = release_reader.classified_indices_entries()?;
assert!(indices.is_empty());
Ok(())
}
}