use crate::archive::{
    add_package_to_zone_archive, create_tarfile, open_tarfile, ArchiveBuilder, AsyncAppendFile,
    Encoder,
};
use crate::blob::{self, BLOB};
use crate::cache::{Cache, CacheError};
use crate::config::{PackageName, ServiceName};
use crate::input::{BuildInput, BuildInputs, MappedPath, TargetDirectory, TargetPackage};
use crate::progress::{NoProgress, Progress};
use crate::target::TargetMap;
use crate::timer::BuildTimer;

use anyhow::{anyhow, bail, Context, Result};
use camino::{Utf8Path, Utf8PathBuf};
use flate2::write::GzEncoder;
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
use std::convert::TryFrom;
use std::fs::File;
use tar::Builder;
use tokio::io::{AsyncSeekExt, AsyncWriteExt};

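/// Converts an absolute path destined for a zone's filesystem into its
/// location within the zone image archive, under the "root/" directory.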
fn zone_archive_path(path: &Utf8Path) -> Result<Utf8PathBuf> {
    let leading_slash = std::path::MAIN_SEPARATOR.to_string();
    Ok(Utf8Path::new("root").join(path.strip_prefix(leading_slash)?))
}

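/// Returns archive directory entries for `to` and each of its ancestors.
///
/// `to` must be an absolute path.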
fn zone_get_all_parent_inputs(to: &Utf8Path) -> Result<Vec<TargetDirectory>> {
    let mut parents: Vec<&Utf8Path> = to.ancestors().collect::<Vec<&Utf8Path>>();
    parents.reverse();

    if to.is_relative() {
        bail!("Cannot add 'to = {to}'; absolute path required");
    }

    let mut outputs = vec![];
    for parent in parents {
        let dst = zone_archive_path(parent)?;
        outputs.push(TargetDirectory(dst))
    }
    Ok(outputs)
}

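/// Identifies a pre-built artifact (downloaded from Buildomat) by repository,
/// series, commit, artifact name, and SHA-256 digest.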
#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Eq)]
pub struct PrebuiltBlob {
    pub repo: String,
    pub series: String,
    pub commit: String,
    pub artifact: String,
    pub sha256: String,
}

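/// Describes where the contents of a package come from.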
#[derive(Clone, Deserialize, Serialize, Debug, PartialEq)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum PackageSource {
    /// Assembled from local sources: downloaded blobs, locally built Rust
    /// binaries, and explicitly mapped paths.
    Local {
        /// Blobs to download from S3.
        blobs: Option<Vec<Utf8PathBuf>>,

        /// Pre-built artifacts to download from Buildomat.
        buildomat_blobs: Option<Vec<PrebuiltBlob>>,

        /// Rust binaries built from the local workspace.
        rust: Option<RustPackage>,

        /// Additional paths to copy into the package.
        #[serde(default)]
        paths: Vec<InterpolatedMappedPath>,
    },

    /// A pre-built artifact from an external repository, identified by commit
    /// and SHA-256 digest.
    Prebuilt {
        repo: String,
        commit: String,
        sha256: String,
    },

    /// Composed from the outputs of other packages.
    Composite { packages: Vec<String> },

    /// Produced outside of this tool.
    Manual,
}

impl PackageSource {
    fn rust_package(&self) -> Option<&RustPackage> {
        match self {
            PackageSource::Local {
                rust: Some(rust_pkg),
                ..
            } => Some(rust_pkg),
            _ => None,
        }
    }

    fn blobs(&self) -> Option<&[Utf8PathBuf]> {
        match self {
            PackageSource::Local {
                blobs: Some(blobs), ..
            } => Some(blobs),
            _ => None,
        }
    }

    fn buildomat_blobs(&self) -> Option<&[PrebuiltBlob]> {
        match self {
            PackageSource::Local {
                buildomat_blobs: Some(buildomat_blobs),
                ..
            } => Some(buildomat_blobs),
            _ => None,
        }
    }
}

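/// Describes the output format of a package.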
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum PackageOutput {
    /// A gzip-compressed tar archive laid out as a zone image.
    Zone {
        /// If true, this zone image is only used as an intermediate input to
        /// other packages, rather than being installed directly.
        #[serde(default)]
        intermediate_only: bool,
    },
    /// A plain tar archive.
    Tarball,
}

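/// Describes a package: where its contents come from, what format it is
/// built into, and which targets it applies to.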
#[derive(Clone, Deserialize, Serialize, Debug, PartialEq)]
pub struct Package {
    /// The name of the service this package provides.
    pub service_name: ServiceName,

    /// Where the contents of the package come from.
    pub source: PackageSource,

    /// The format of the built artifact.
    pub output: PackageOutput,

    /// If present, the package is only built for targets matching these
    /// key/value pairs.
    pub only_for_targets: Option<TargetMap>,

    /// An optional human-readable setup hint.
    #[serde(default)]
    pub setup_hint: Option<String>,
}

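/// The version reported for packages which have not been explicitly stamped.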
const DEFAULT_VERSION: semver::Version = semver::Version::new(0, 0, 0);

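/// Creates a gzip-compressed archive builder for `<package_name>.tar.gz`
/// within `output_directory`.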
async fn new_zone_archive_builder(
    package_name: &PackageName,
    output_directory: &Utf8Path,
) -> Result<ArchiveBuilder<GzEncoder<File>>> {
    let tarfile = output_directory.join(format!("{}.tar.gz", package_name));
    crate::archive::new_compressed_archive_builder(&tarfile).await
}

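/// Configuration for a single package build.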
pub struct BuildConfig<'a> {
    /// The target for which the package is being built.
    pub target: &'a TargetMap,

    /// Receives progress updates while the package is built.
    pub progress: &'a dyn Progress,

    /// If true, the package cache is bypassed.
    pub cache_disabled: bool,
}

static DEFAULT_TARGET: TargetMap = TargetMap(BTreeMap::new());
static DEFAULT_PROGRESS: NoProgress = NoProgress::new();

impl Default for BuildConfig<'_> {
    fn default() -> Self {
        Self {
            target: &DEFAULT_TARGET,
            progress: &DEFAULT_PROGRESS,
            cache_disabled: false,
        }
    }
}

impl Package {
    pub fn get_output_path(&self, id: &PackageName, output_directory: &Utf8Path) -> Utf8PathBuf {
        output_directory.join(self.get_output_file(id))
    }

    pub fn get_output_path_for_service(&self, install_directory: &Utf8Path) -> Utf8PathBuf {
        install_directory.join(self.get_output_file_for_service())
    }

    pub fn get_stamped_output_path(
        &self,
        name: &PackageName,
        output_directory: &Utf8Path,
    ) -> Utf8PathBuf {
        output_directory
            .join("versioned")
            .join(self.get_output_file(name))
    }

    pub fn get_output_file(&self, name: &PackageName) -> String {
        match self.output {
            PackageOutput::Zone { .. } => format!("{}.tar.gz", name),
            PackageOutput::Tarball => format!("{}.tar", name),
        }
    }

    pub fn get_output_file_for_service(&self) -> String {
        match self.output {
            PackageOutput::Zone { .. } => format!("{}.tar.gz", self.service_name),
            PackageOutput::Tarball => format!("{}.tar", self.service_name),
        }
    }

    #[deprecated = "Use 'Package::create', which now takes a 'BuildConfig', and implements 'Default'"]
    pub async fn create_for_target(
        &self,
        target: &TargetMap,
        name: &PackageName,
        output_directory: &Utf8Path,
    ) -> Result<File> {
        let build_config = BuildConfig {
            target,
            ..Default::default()
        };
        self.create_internal(name, output_directory, &build_config)
            .await
    }

    pub async fn create(
        &self,
        name: &PackageName,
        output_directory: &Utf8Path,
        build_config: &BuildConfig<'_>,
    ) -> Result<File> {
        self.create_internal(name, output_directory, build_config)
            .await
    }

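    /// Writes a "stamped" copy of the package to the versioned output path,
    /// embedding `version` in the archive ("oxide.json" for zone images, a
    /// "VERSION" file for tarballs).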
    pub async fn stamp(
        &self,
        name: &PackageName,
        output_directory: &Utf8Path,
        version: &semver::Version,
    ) -> Result<Utf8PathBuf> {
        let stamp_path = self.get_stamped_output_path(name, output_directory);
        std::fs::create_dir_all(stamp_path.parent().unwrap())?;

        match self.output {
            PackageOutput::Zone { .. } => {
                let mut inputs = BuildInputs::new();
                inputs.0.push(self.get_version_input(name, Some(version)));
                inputs.0.push(BuildInput::AddPackage(TargetPackage(
                    self.get_output_path(name, output_directory),
                )));

                // Rebuild the zone archive with the version input included.
                let mut archive =
                    new_zone_archive_builder(name, stamp_path.parent().unwrap()).await?;
                for input in inputs.0.iter() {
                    self.add_input_to_package(&NoProgress::new(), &mut archive, input)
                        .await
                        .with_context(|| format!("Adding input {input:?}"))?;
                }

                // Finalize the archive.
                archive
                    .builder
                    .into_inner()
                    .map_err(|err| anyhow!("Failed to finalize archive: {}", err))?
                    .finish()?;
            }
            PackageOutput::Tarball => {
                // Unpack the unstamped tarball to a temporary directory.
                let original_file = self.get_output_path(name, output_directory);
                let mut reader = tar::Archive::new(open_tarfile(&original_file)?);
                let tmp = camino_tempfile::tempdir()?;
                reader.unpack(tmp.path())?;

                // Remove any old version file, ignoring "not found" errors.
                if let Err(err) = std::fs::remove_file(tmp.path().join("VERSION")) {
                    if err.kind() != std::io::ErrorKind::NotFound {
                        return Err(err.into());
                    }
                }

                // Re-pack the contents along with the new version stamp.
                let file = create_tarfile(&stamp_path)?;
                let mut archive = Builder::new(file);
                archive.mode(tar::HeaderMode::Deterministic);
                archive.append_dir_all_async(".", tmp.path()).await?;

                self.add_stamp_to_tarball_package(&mut archive, version)
                    .await?;

                archive.finish()?;
            }
        }
        Ok(stamp_path)
    }

    #[deprecated = "Use 'Package::create', which now takes a 'BuildConfig', and implements 'Default'"]
    pub async fn create_with_progress_for_target(
        &self,
        progress: &impl Progress,
        target: &TargetMap,
        name: &PackageName,
        output_directory: &Utf8Path,
    ) -> Result<File> {
        let config = BuildConfig {
            target,
            progress,
            ..Default::default()
        };
        self.create_internal(name, output_directory, &config).await
    }

    async fn create_internal(
        &self,
        name: &PackageName,
        output_directory: &Utf8Path,
        config: &BuildConfig<'_>,
    ) -> Result<File> {
        let mut timer = BuildTimer::new();
        let output = match self.output {
            PackageOutput::Zone { .. } => {
                self.create_zone_package(&mut timer, name, output_directory, config)
                    .await?
            }
            PackageOutput::Tarball => {
                self.create_tarball_package(name, output_directory, config)
                    .await?
            }
        };

        timer.log_all(config.progress.get_log());
        Ok(output)
    }

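    /// Returns the in-memory version metadata for this package: an
    /// "oxide.json" layer descriptor for zone images, or a plain "VERSION"
    /// file for tarballs.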
    fn get_version_input(
        &self,
        package_name: &PackageName,
        version: Option<&semver::Version>,
    ) -> BuildInput {
        match &self.output {
            PackageOutput::Zone { .. } => {
                // Unstamped packages fall back to the default version.
                let version = version.cloned().unwrap_or(DEFAULT_VERSION);
                let version = &version.to_string();

                // Encode the package metadata as a small JSON document,
                // stored in the archive as "oxide.json".
                let kvs = vec![
                    ("v", "1"),
                    ("t", "layer"),
                    ("pkg", package_name.as_ref()),
                    ("version", version),
                ];

                let contents = String::from("{")
                    + &kvs
                        .into_iter()
                        .map(|(k, v)| format!("\"{k}\":\"{v}\""))
                        .collect::<Vec<String>>()
                        .join(",")
                    + "}";

                BuildInput::AddInMemoryFile {
                    dst_path: "oxide.json".into(),
                    contents,
                }
            }
            PackageOutput::Tarball => {
                let version = version.cloned().unwrap_or(DEFAULT_VERSION);
                let contents = version.to_string();
                BuildInput::AddInMemoryFile {
                    dst_path: "VERSION".into(),
                    contents,
                }
            }
        }
    }

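    /// Expands each interpolated path mapping into concrete file and
    /// directory inputs by walking the source tree.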
    fn get_paths_inputs(
        &self,
        target: &TargetMap,
        paths: &Vec<InterpolatedMappedPath>,
    ) -> Result<BuildInputs> {
        let mut inputs = BuildInputs::new();

        for path in paths {
            let path = path.interpolate(target)?;
            let from = path.from;
            let to = path.to;

            match self.output {
                PackageOutput::Zone { .. } => {
                    // Zone images need the parent directories of the
                    // destination to exist within the archive.
                    inputs.0.extend(
                        zone_get_all_parent_inputs(to.parent().unwrap())?
                            .into_iter()
                            .map(BuildInput::AddDirectory),
                    );
                }
                PackageOutput::Tarball => {}
            }
            if !from.exists() {
                bail!(
                    "Cannot add path \"{}\" to package \"{}\" because it does not exist",
                    from,
                    self.service_name,
                );
            }

            let from_root = std::fs::canonicalize(&from)
                .map_err(|e| anyhow!("failed to canonicalize \"{}\": {}", from, e))?;
            let entries = walkdir::WalkDir::new(&from_root)
                .follow_links(true)
                .sort_by_file_name();
            for entry in entries {
                let entry = entry?;
                let dst = if from.is_dir() {
                    // Directories are walked recursively; preserve the layout
                    // relative to the source root.
                    to.join(<&Utf8Path>::try_from(
                        entry.path().strip_prefix(&from_root)?,
                    )?)
                } else {
                    assert_eq!(entry.path(), from_root.as_path());
                    to.clone()
                };

                let dst = match self.output {
                    PackageOutput::Zone { .. } => {
                        // Redirect the destination into the zone image's root.
                        zone_archive_path(&dst)?
                    }
                    PackageOutput::Tarball => dst,
                };

                if entry.file_type().is_dir() {
                    inputs
                        .0
                        .push(BuildInput::AddDirectory(TargetDirectory(dst)));
                } else if entry.file_type().is_file() {
                    let src = <&Utf8Path>::try_from(entry.path())?;
                    inputs.0.push(BuildInput::add_file(MappedPath {
                        from: src.to_path_buf(),
                        to: dst,
                    })?);
                } else {
                    panic!(
                        "Unsupported file type: {:?} for {:?}",
                        entry.file_type(),
                        entry
                    );
                }
            }
        }

        Ok(inputs)
    }

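    /// Collects every input contributing to this package: version metadata
    /// plus, depending on the source, mapped paths, Rust binaries, blobs, or
    /// component packages.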
    fn get_all_inputs(
        &self,
        package_name: &PackageName,
        target: &TargetMap,
        output_directory: &Utf8Path,
        zoned: bool,
        version: Option<&semver::Version>,
    ) -> Result<BuildInputs> {
        let mut all_paths = BuildInputs::new();

        all_paths
            .0
            .push(self.get_version_input(package_name, version));

        match &self.source {
            PackageSource::Local { paths, .. } => {
                all_paths.0.extend(self.get_paths_inputs(target, paths)?.0);
                all_paths.0.extend(self.get_rust_inputs()?.0);
                all_paths
                    .0
                    .extend(self.get_blobs_inputs(output_directory, zoned)?.0);
            }
            PackageSource::Composite { packages } => {
                for component_package in packages {
                    all_paths.0.push(BuildInput::AddPackage(TargetPackage(
                        output_directory.join(component_package),
                    )));
                }
            }
            _ => {
                bail!(
                    "Cannot walk over a zone package with source: {:?}",
                    self.source
                );
            }
        }

        Ok(all_paths)
    }

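    /// Maps locally-built Rust binaries into the package (under
    /// "/opt/oxide/<service>/bin" for zone images).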
    fn get_rust_inputs(&self) -> Result<BuildInputs> {
        let mut inputs = BuildInputs::new();
        if let Some(rust_pkg) = self.source.rust_package() {
            let dst_directory = match self.output {
                PackageOutput::Zone { .. } => {
                    let dst = Utf8Path::new("/opt/oxide")
                        .join(self.service_name.as_str())
                        .join("bin");
                    inputs.0.extend(
                        zone_get_all_parent_inputs(&dst)?
                            .into_iter()
                            .map(BuildInput::AddDirectory),
                    );

                    zone_archive_path(&dst)?
                }
                PackageOutput::Tarball => Utf8PathBuf::from(""),
            };

            for binary in &rust_pkg.binary_names {
                let from = RustPackage::local_binary_path(binary, rust_pkg.release);
                let to = dst_directory.join(binary);
                inputs
                    .0
                    .push(BuildInput::add_file(MappedPath { from, to })?);
            }
        }
        Ok(inputs)
    }

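    /// Maps S3 and Buildomat blobs into the package's blob directory; the
    /// blobs themselves are downloaded when the archive is assembled.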
    fn get_blobs_inputs(&self, download_directory: &Utf8Path, zoned: bool) -> Result<BuildInputs> {
        let mut inputs = BuildInputs::new();

        if self.source.blobs().is_none() && self.source.buildomat_blobs().is_none() {
            return Ok(inputs);
        }

        let destination_path = if zoned {
            let dst = Utf8Path::new("/opt/oxide")
                .join(self.service_name.as_str())
                .join(BLOB);

            inputs.0.extend(
                zone_get_all_parent_inputs(&dst)?
                    .into_iter()
                    .map(BuildInput::AddDirectory),
            );

            zone_archive_path(&dst)?
        } else {
            Utf8PathBuf::from(BLOB)
        };

        if let Some(s3_blobs) = self.source.blobs() {
            inputs.0.extend(s3_blobs.iter().map(|blob| {
                let from = download_directory
                    .join(self.service_name.as_str())
                    .join(blob);
                let to = destination_path.join(blob);
                BuildInput::AddBlob {
                    path: MappedPath { from, to },
                    blob: crate::blob::Source::S3(blob.clone()),
                }
            }))
        }
        if let Some(buildomat_blobs) = self.source.buildomat_blobs() {
            inputs.0.extend(buildomat_blobs.iter().map(|blob| {
                let from = download_directory
                    .join(self.service_name.as_str())
                    .join(&blob.artifact);
                let to = destination_path.join(&blob.artifact);
                BuildInput::AddBlob {
                    path: MappedPath { from, to },
                    blob: crate::blob::Source::Buildomat(blob.clone()),
                }
            }));
        }
        Ok(inputs)
    }

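    /// Builds a zone image archive, consulting the package cache before
    /// assembling inputs and updating it afterwards.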
    async fn create_zone_package(
        &self,
        timer: &mut BuildTimer,
        name: &PackageName,
        output_directory: &Utf8Path,
        config: &BuildConfig<'_>,
    ) -> Result<File> {
        let target = &config.target;
        let progress = &config.progress;
        let mut cache = Cache::new(output_directory).await?;
        cache.set_disable(config.cache_disabled);
        timer.start("walking paths (identifying all inputs)");

        progress.set_message("Identifying inputs".into());
        let zoned = true;
        let inputs = self
            .get_all_inputs(name, target, output_directory, zoned, None)
            .context("Identifying all input paths")?;
        progress.increment_total(inputs.0.len() as u64);

        let output_file = self.get_output_file(name);
        let output_path = output_directory.join(&output_file);

        timer.start("cache lookup");

        // Try to satisfy the build from the cache before doing any work.
        match cache.lookup(&inputs, &output_path).await {
            Ok(_) => {
                timer.finish_with_label("Cache hit")?;
                progress.set_message("Cache hit".into());
                return Ok(File::open(output_path)?);
            }
            Err(CacheError::CacheMiss { reason }) => {
                timer.finish_with_label(format!("Cache miss: {reason}"))?;
                progress.set_message("Cache miss".into());
            }
            Err(CacheError::Other(other)) => {
                return Err(other).context("Reading from package cache");
            }
        }

        timer.start("add inputs to package");
        let mut archive = new_zone_archive_builder(name, output_directory).await?;

        for input in inputs.0.iter() {
            self.add_input_to_package(&**progress, &mut archive, input)
                .await
                .with_context(|| format!("Adding input {input:?}"))?;
        }
        timer.start("finalize archive");
        let file = archive.into_inner()?.finish()?;

        timer.start("update cache manifest");
        progress.set_message("Updating cached copy".into());

        cache
            .update(&inputs, &output_path)
            .await
            .context("Updating package cache")?;

        timer.finish()?;
        Ok(file)
    }

    async fn add_stamp_to_tarball_package(
        &self,
        archive: &mut Builder<File>,
        version: &semver::Version,
    ) -> Result<()> {
        // Write the version to a temporary file, then append it to the
        // archive as "VERSION".
        let mut version_file = tokio::fs::File::from_std(camino_tempfile::tempfile()?);
        version_file
            .write_all(version.to_string().as_bytes())
            .await?;
        version_file.seek(std::io::SeekFrom::Start(0)).await?;
        let version_filename = Utf8Path::new("VERSION");
        archive
            .append_file_async(version_filename, &mut version_file.into_std().await)
            .await?;
        Ok(())
    }

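    /// Adds a single build input to the archive, downloading blobs on demand.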
    async fn add_input_to_package<E: Encoder>(
        &self,
        progress: &dyn Progress,
        archive: &mut ArchiveBuilder<E>,
        input: &BuildInput,
    ) -> Result<()> {
        match &input {
            BuildInput::AddInMemoryFile { dst_path, contents } => {
                let mut src_file = tokio::fs::File::from_std(camino_tempfile::tempfile()?);
                src_file.write_all(contents.as_bytes()).await?;
                src_file.seek(std::io::SeekFrom::Start(0)).await?;
                archive
                    .builder
                    .append_file_async(dst_path, &mut src_file.into_std().await)
                    .await?;
            }
            BuildInput::AddDirectory(dir) => archive.builder.append_dir(&dir.0, ".")?,
            BuildInput::AddFile { mapped_path, .. } => {
                let src = &mapped_path.from;
                let dst = &mapped_path.to;
                progress.set_message(format!("adding file: {}", src).into());
                archive
                    .builder
                    .append_path_with_name_async(src, dst)
                    .await
                    .context(format!("Failed to add file '{}' to '{}'", src, dst,))?;
            }
            BuildInput::AddBlob { path, blob } => {
                // Download the blob into the local blob directory, then add
                // it to the archive.
                let blobs_path = path.from.parent().unwrap();
                std::fs::create_dir_all(blobs_path)?;

                let blob_path = match &blob {
                    blob::Source::S3(s) => blobs_path.join(s),
                    blob::Source::Buildomat(spec) => blobs_path.join(&spec.artifact),
                };

                blob::download(progress, blob, &blob_path)
                    .await
                    .with_context(|| format!("failed to download blob: {}", blob.get_url()))?;

                let src = &blob_path;
                let dst = &path.to;
                progress.set_message(format!("adding file: {}", src).into());
                archive
                    .builder
                    .append_path_with_name_async(src, dst)
                    .await
                    .context(format!("Failed to add file '{}' to '{}'", src, dst,))?;
            }
            BuildInput::AddPackage(component_package) => {
                progress.set_message(format!("adding package: {}", component_package.0).into());
                add_package_to_zone_archive(archive, &component_package.0).await?;
            }
        }
        progress.increment_completed(1);
        Ok(())
    }

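    /// Builds a plain tarball package from local sources, using the same
    /// cache lookup/update flow as zone packages.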
    async fn create_tarball_package(
        &self,
        name: &PackageName,
        output_directory: &Utf8Path,
        config: &BuildConfig<'_>,
    ) -> Result<File> {
        let progress = &config.progress;

        if !matches!(self.source, PackageSource::Local { .. }) {
            bail!("Cannot create non-local tarball");
        }

        let output_path = self.get_output_path(name, output_directory);
        let mut cache = Cache::new(output_directory).await?;
        cache.set_disable(config.cache_disabled);

        let zoned = false;
        let inputs = self
            .get_all_inputs(name, config.target, output_directory, zoned, None)
            .context("Identifying all input paths")?;
        progress.increment_total(inputs.0.len() as u64);

        match cache.lookup(&inputs, &output_path).await {
            Ok(_) => {
                progress.set_message("Cache hit".into());
                return Ok(File::open(output_path)?);
            }
            Err(CacheError::CacheMiss { reason: _ }) => {
                progress.set_message("Cache miss".into());
            }
            Err(CacheError::Other(other)) => {
                return Err(other).context("Reading from package cache");
            }
        }

        let file = create_tarfile(&output_path)?;
        let mut archive = ArchiveBuilder::new(Builder::new(file));
        archive.builder.mode(tar::HeaderMode::Deterministic);

        for input in inputs.0.iter() {
            self.add_input_to_package(&**progress, &mut archive, input)
                .await?;
        }

        let file = archive
            .builder
            .into_inner()
            .map_err(|err| anyhow!("Failed to finalize archive: {}", err))?;

        progress.set_message("Updating cached copy".into());
        cache
            .update(&inputs, &output_path)
            .await
            .context("Updating package cache")?;

        Ok(file)
    }
}

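/// Describes a set of Rust binaries built from the local Cargo workspace.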
#[derive(Clone, Deserialize, Serialize, Debug, PartialEq)]
pub struct RustPackage {
    /// The names of the binaries to package.
    pub binary_names: Vec<String>,

    /// True if the binaries are built in release mode.
    pub release: bool,
}

impl RustPackage {
    // Returns the path to a binary in the local Cargo target directory.
    fn local_binary_path(name: &str, release: bool) -> Utf8PathBuf {
        format!(
            "target/{}/{}",
            if release { "release" } else { "debug" },
            name,
        )
        .into()
    }
}

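/// A string which may contain "{{key}}" references that are replaced with
/// values from the target map when interpolated.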
#[derive(Clone, Deserialize, Serialize, Debug, PartialEq)]
pub struct InterpolatedString(String);

impl InterpolatedString {
    pub fn interpolate(&self, target: &TargetMap) -> Result<String> {
        let mut input = self.0.as_str();
        let mut output = String::new();

        const START_STR: &str = "{{";
        const END_STR: &str = "}}";

        // Copy the input to the output, replacing each "{{key}}" occurrence
        // with the corresponding value from the target map.
        while let Some(sub_idx) = input.find(START_STR) {
            output.push_str(&input[..sub_idx]);
            input = &input[sub_idx + START_STR.len()..];

            let Some(end_idx) = input.find(END_STR) else {
                bail!("Missing closing '{END_STR}' character in '{}'", self.0);
            };
            let key = &input[..end_idx];
            let Some(value) = target.0.get(key) else {
                bail!(
                    "Key '{key}' not found in target, but required in '{}'",
                    self.0
                );
            };
            output.push_str(value);
            input = &input[end_idx + END_STR.len()..];
        }
        output.push_str(input);
        Ok(output)
    }
}

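/// A "from"/"to" path mapping in which both sides may reference target keys.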
#[derive(Clone, Deserialize, Serialize, Debug, PartialEq)]
pub struct InterpolatedMappedPath {
    pub from: InterpolatedString,
    pub to: InterpolatedString,
}

impl InterpolatedMappedPath {
    fn interpolate(&self, target: &TargetMap) -> Result<MappedPath> {
        Ok(MappedPath {
            from: Utf8PathBuf::from(self.from.interpolate(target)?),
            to: Utf8PathBuf::from(self.to.interpolate(target)?),
        })
    }
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn interpolate_noop() {
        let target = TargetMap(BTreeMap::new());
        let is = InterpolatedString(String::from("nothing to change"));

        let s = is.interpolate(&target).unwrap();
        assert_eq!(s, is.0);
    }

    #[test]
    fn interpolate_single() {
        let mut target = TargetMap(BTreeMap::new());
        target.0.insert("key1".to_string(), "value1".to_string());
        let is = InterpolatedString(String::from("{{key1}}"));

        let s = is.interpolate(&target).unwrap();
        assert_eq!(s, "value1");
    }

    #[test]
    fn interpolate_single_with_prefix() {
        let mut target = TargetMap(BTreeMap::new());
        target.0.insert("key1".to_string(), "value1".to_string());
        let is = InterpolatedString(String::from("prefix-{{key1}}"));

        let s = is.interpolate(&target).unwrap();
        assert_eq!(s, "prefix-value1");
    }

    #[test]
    fn interpolate_single_with_suffix() {
        let mut target = TargetMap(BTreeMap::new());
        target.0.insert("key1".to_string(), "value1".to_string());
        let is = InterpolatedString(String::from("{{key1}}-suffix"));

        let s = is.interpolate(&target).unwrap();
        assert_eq!(s, "value1-suffix");
    }

    #[test]
    fn interpolate_multiple() {
        let mut target = TargetMap(BTreeMap::new());
        target.0.insert("key1".to_string(), "value1".to_string());
        target.0.insert("key2".to_string(), "value2".to_string());
        let is = InterpolatedString(String::from("{{key1}}-{{key2}}"));

        let s = is.interpolate(&target).unwrap();
        assert_eq!(s, "value1-value2");
    }

    #[test]
    fn interpolate_missing_key() {
        let mut target = TargetMap(BTreeMap::new());
        target.0.insert("key1".to_string(), "value1".to_string());
        let is = InterpolatedString(String::from("{{key3}}"));

        let err = is
            .interpolate(&target)
            .expect_err("Interpolating string should have failed");
        assert_eq!(
            err.to_string(),
            "Key 'key3' not found in target, but required in '{{key3}}'"
        );
    }

    #[test]
    fn interpolate_missing_closing() {
        let mut target = TargetMap(BTreeMap::new());
        target.0.insert("key1".to_string(), "value1".to_string());
        let is = InterpolatedString(String::from("{{key1"));

        let err = is
            .interpolate(&target)
            .expect_err("Interpolating string should have failed");
        assert_eq!(
            err.to_string(),
            "Missing closing '}}' character in '{{key1'"
        );
    }

    // An opening "{{" inside a key is not treated specially: everything up to
    // the next "}}" is looked up as the key.
    #[test]
    fn interpolate_key_as_literal() {
        let mut target = TargetMap(BTreeMap::new());
        target.0.insert("oh{{no".to_string(), "value".to_string());
        let is = InterpolatedString(String::from("{{oh{{no}}"));

        let s = is.interpolate(&target).unwrap();
        assert_eq!(s, "value");
    }
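
    // Illustrative checks of the path-mapping helpers above; these are
    // sketches rather than part of the original suite, and the concrete
    // paths used here are hypothetical. They assume Unix-style "/" path
    // separators (std::path::MAIN_SEPARATOR on illumos/Linux).
    #[test]
    fn interpolate_mapped_path() {
        let mut target = TargetMap(BTreeMap::new());
        target.0.insert("flavor".to_string(), "standard".to_string());
        let path = InterpolatedMappedPath {
            from: InterpolatedString(String::from("smf/{{flavor}}/manifest.xml")),
            to: InterpolatedString(String::from("/var/svc/manifest.xml")),
        };

        let mapped = path.interpolate(&target).unwrap();
        assert_eq!(mapped.from, Utf8PathBuf::from("smf/standard/manifest.xml"));
        assert_eq!(mapped.to, Utf8PathBuf::from("/var/svc/manifest.xml"));
    }

    #[test]
    fn zone_archive_paths_are_rooted() {
        // Absolute destination paths are re-rooted under "root/" within the
        // zone image archive.
        let dst = zone_archive_path(Utf8Path::new("/opt/oxide/bin")).unwrap();
        assert_eq!(dst, Utf8PathBuf::from("root/opt/oxide/bin"));

        // Relative destinations are rejected.
        assert!(zone_get_all_parent_inputs(Utf8Path::new("relative/path")).is_err());
    }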
}