1use std::collections::{BTreeSet, HashMap};
2use std::fs::{self, File};
3use std::io::prelude::*;
4use std::io::SeekFrom;
5use std::path::{Path, PathBuf};
6use std::rc::Rc;
7use std::sync::Arc;
8use std::time::SystemTime;
9
10use flate2::read::GzDecoder;
11use flate2::{Compression, GzBuilder};
12use log::debug;
13use tar::{Archive, Builder, EntryType, Header};
14
15use crate::core::compiler::{BuildConfig, CompileMode, DefaultExecutor, Executor};
16use crate::core::{Feature, Shell, Verbosity, Workspace};
17use crate::core::{Package, PackageId, PackageSet, Resolve, Source, SourceId};
18use crate::ops;
19use crate::sources::PathSource;
20use crate::util::errors::{CargoResult, CargoResultExt};
21use crate::util::paths;
22use crate::util::toml::TomlManifest;
23use crate::util::{self, restricted_names, Config, FileLock};
24
/// Options controlling `cargo package` (and the packaging half of publish).
pub struct PackageOpts<'cfg> {
    pub config: &'cfg Config,
    /// Only print the files that would go into the archive; don't package.
    pub list: bool,
    /// Warn about missing manifest metadata (description, license, ...).
    pub check_metadata: bool,
    /// Skip the dirty-VCS check (and the generated VCS info file).
    pub allow_dirty: bool,
    /// After packaging, unpack and build the tarball to verify it.
    pub verify: bool,
    pub jobs: Option<u32>,
    pub target: Option<String>,
    pub features: Vec<String>,
    pub all_features: bool,
    pub no_default_features: bool,
}
37
/// Name of the generated archive entry recording VCS state (the git sha1).
const VCS_INFO_FILE: &str = ".cargo_vcs_info.json";
39
/// One file destined for the `.crate` archive.
struct ArchiveFile {
    /// Path of the entry relative to the package root inside the archive.
    rel_path: PathBuf,
    /// `rel_path` as a string, pre-validated as UTF-8 (used for display).
    rel_str: String,
    /// Where the entry's bytes come from.
    contents: FileContents,
}
49
/// Source of an archive entry's contents.
enum FileContents {
    /// Copy the file from this on-disk location.
    OnDisk(PathBuf),
    /// Write this generated string (e.g. normalized `Cargo.toml`, new `Cargo.lock`).
    Generated(String),
}
56
/// Packages the current workspace package into a `.crate` tarball under
/// `target/package`, optionally verifying that the result still builds.
///
/// Returns `Ok(None)` in list mode (the archive contents were printed
/// instead of written); otherwise the lock held on the finished tarball.
pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Option<FileLock>> {
    if ws.root().join("Cargo.lock").exists() {
        // Make sure the workspace resolves cleanly before packaging.
        let _ = ops::resolve_ws(ws)?;
    }
    let pkg = ws.current()?;
    let config = ws.config();

    // Refresh the on-disk listing of the package's source files.
    let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), config);
    src.update()?;

    if opts.check_metadata {
        check_metadata(pkg, config)?;
    }

    verify_dependencies(pkg)?;

    if !pkg.manifest().exclude().is_empty() && !pkg.manifest().include().is_empty() {
        config.shell().warn(
            "both package.include and package.exclude are specified; \
             the exclude list will be ignored",
        )?;
    }
    let src_files = src.list_files(pkg)?;

    // Capture the git HEAD sha as the JSON body of `.cargo_vcs_info.json`,
    // unless the dirty-state check is skipped via `--allow-dirty`.
    let vcs_info = if !opts.allow_dirty {
        check_repo_state(pkg, &src_files, config)?
            .map(|h| format!("{{\n \"git\": {{\n \"sha1\": \"{}\"\n }}\n}}\n", h))
    } else {
        None
    };

    let ar_files = build_ar_list(ws, pkg, src_files, vcs_info)?;

    if opts.list {
        for ar_file in ar_files {
            println!("{}", ar_file.rel_str);
        }
        return Ok(None);
    }

    // Write the tarball to a dot-prefixed scratch file first, then rename
    // it into place once fully written (and optionally verified).
    let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
    let dir = ws.target_dir().join("package");
    let mut dst = {
        let tmp = format!(".{}", filename);
        dir.open_rw(&tmp, config, "package scratch space")?
    };

    config
        .shell()
        .status("Packaging", pkg.package_id().to_string())?;
    dst.file().set_len(0)?;
    tar(ws, ar_files, dst.file(), &filename)
        .chain_err(|| anyhow::format_err!("failed to prepare local package for uploading"))?;
    if opts.verify {
        // Rewind so the verify step can re-read the freshly written archive.
        dst.seek(SeekFrom::Start(0))?;
        run_verify(ws, &dst, opts).chain_err(|| "failed to verify package tarball")?
    }
    dst.seek(SeekFrom::Start(0))?;
    {
        let src_path = dst.path();
        let dst_path = dst.parent().join(&filename);
        fs::rename(&src_path, &dst_path)
            .chain_err(|| "failed to move temporary tarball into final location")?;
    }
    Ok(Some(dst))
}
133
/// Builds the sorted list of `ArchiveFile`s that make up the `.crate`
/// archive: the on-disk source files plus generated entries (normalized
/// `Cargo.toml`, regenerated `Cargo.lock`, the VCS info file, and possibly
/// a license file referenced from outside the package root).
fn build_ar_list(
    ws: &Workspace<'_>,
    pkg: &Package,
    src_files: Vec<PathBuf>,
    vcs_info: Option<String>,
) -> CargoResult<Vec<ArchiveFile>> {
    let mut result = Vec::new();
    let root = pkg.root();
    for src_file in src_files {
        let rel_path = src_file.strip_prefix(&root)?.to_path_buf();
        check_filename(&rel_path, &mut ws.config().shell())?;
        let rel_str = rel_path
            .to_str()
            .ok_or_else(|| {
                anyhow::format_err!("non-utf8 path in source directory: {}", rel_path.display())
            })?
            .to_string();
        match rel_str.as_ref() {
            "Cargo.toml" => {
                // Ship the original manifest as `Cargo.toml.orig` and a
                // generated, registry-ready manifest as `Cargo.toml`.
                result.push(ArchiveFile {
                    rel_path: PathBuf::from("Cargo.toml.orig"),
                    rel_str: "Cargo.toml.orig".to_string(),
                    contents: FileContents::OnDisk(src_file),
                });
                let generated = pkg.to_registry_toml(ws.config())?;
                result.push(ArchiveFile {
                    rel_path,
                    rel_str,
                    contents: FileContents::Generated(generated),
                });
            }
            // The lockfile is regenerated below when needed; skip the
            // on-disk copy.
            "Cargo.lock" => continue,
            VCS_INFO_FILE => anyhow::bail!(
                "invalid inclusion of reserved file name \
                 {} in package source",
                VCS_INFO_FILE
            ),
            _ => {
                result.push(ArchiveFile {
                    rel_path,
                    rel_str,
                    contents: FileContents::OnDisk(src_file),
                });
            }
        }
    }
    if pkg.include_lockfile() {
        // Regenerate Cargo.lock against the publish-ready manifest.
        let new_lock = build_lock(ws)?;
        result.push(ArchiveFile {
            rel_path: PathBuf::from("Cargo.lock"),
            rel_str: "Cargo.lock".to_string(),
            contents: FileContents::Generated(new_lock),
        });
    }
    if let Some(vcs_info) = vcs_info {
        result.push(ArchiveFile {
            rel_path: PathBuf::from(VCS_INFO_FILE),
            rel_str: VCS_INFO_FILE.to_string(),
            contents: FileContents::Generated(vcs_info),
        });
    }
    if let Some(license_file) = &pkg.manifest().metadata().license_file {
        // `license-file` may point outside the package root; try to include
        // a copy of the license in the archive when possible.
        let license_path = Path::new(license_file);
        let abs_license_path = paths::normalize_path(&pkg.root().join(license_path));
        if abs_license_path.exists() {
            match abs_license_path.strip_prefix(&pkg.root()) {
                Ok(rel_license_path) => {
                    // Inside the package root: add it unless already listed.
                    if !result.iter().any(|ar| ar.rel_path == rel_license_path) {
                        result.push(ArchiveFile {
                            rel_path: rel_license_path.to_path_buf(),
                            rel_str: rel_license_path
                                .to_str()
                                .expect("everything was utf8")
                                .to_string(),
                            contents: FileContents::OnDisk(abs_license_path),
                        });
                    }
                }
                Err(_) => {
                    // Outside the package root: place a copy at the archive
                    // root, unless a file of the same name already exists
                    // there (then warn and keep the in-package copy).
                    let license_name = license_path.file_name().unwrap();
                    if result
                        .iter()
                        .any(|ar| ar.rel_path.file_name().unwrap() == license_name)
                    {
                        ws.config().shell().warn(&format!(
                            "license-file `{}` appears to be a path outside of the package, \
                            but there is already a file named `{}` in the root of the package. \
                            The archived crate will contain the copy in the root of the package. \
                            Update the license-file to point to the path relative \
                            to the root of the package to remove this warning.",
                            license_file,
                            license_name.to_str().unwrap()
                        ))?;
                    } else {
                        result.push(ArchiveFile {
                            rel_path: PathBuf::from(license_name),
                            rel_str: license_name.to_str().unwrap().to_string(),
                            contents: FileContents::OnDisk(abs_license_path),
                        });
                    }
                }
            }
        } else {
            // The referenced license file doesn't exist; warn (not an error).
            let rel_msg = if license_path.is_absolute() {
                "".to_string()
            } else {
                format!(" (relative to `{}`)", pkg.root().display())
            };
            ws.config().shell().warn(&format!(
                "license-file `{}` does not appear to exist{}.\n\
                 Please update the license-file setting in the manifest at `{}`\n\
                 This may become a hard error in the future.",
                license_path.display(),
                rel_msg,
                pkg.manifest_path().display()
            ))?;
        }
    }
    // Stable, deterministic archive order.
    result.sort_unstable_by(|a, b| a.rel_path.cmp(&b.rel_path));

    Ok(result)
}
258
/// Regenerates a `Cargo.lock` for the package as it will exist inside the
/// published archive, returning the serialized lockfile string.
///
/// Resolution runs against the publish-ready manifest (via
/// `prepare_for_publish`) inside an ephemeral workspace; differences from
/// the pre-existing lockfile are reported and yanked versions warned about.
fn build_lock(ws: &Workspace<'_>) -> CargoResult<String> {
    let config = ws.config();
    // Load the existing lockfile, if any, for comparison later.
    let orig_resolve = ops::load_pkg_lockfile(ws)?;

    // Rebuild the package from its publish-ready manifest.
    let orig_pkg = ws.current()?;
    let toml_manifest = Rc::new(
        orig_pkg
            .manifest()
            .original()
            .prepare_for_publish(config, orig_pkg.root())?,
    );
    let package_root = orig_pkg.root();
    let source_id = orig_pkg.package_id().source_id();
    let (manifest, _nested_paths) =
        TomlManifest::to_real_manifest(&toml_manifest, source_id, package_root, config)?;
    let new_pkg = Package::new(manifest, orig_pkg.manifest_path());

    // Resolve in a throwaway workspace so the real workspace is untouched.
    let tmp_ws = Workspace::ephemeral(new_pkg, ws.config(), None, true)?;
    let (pkg_set, new_resolve) = ops::resolve_ws(&tmp_ws)?;

    if let Some(orig_resolve) = orig_resolve {
        compare_resolve(config, tmp_ws.current()?, &orig_resolve, &new_resolve)?;
    }
    check_yanked(config, &pkg_set, &new_resolve)?;

    ops::resolve_to_string(&tmp_ws, &new_resolve)
}
289
/// Warns (never errors) when commonly-expected manifest metadata is absent:
/// description, license/license-file, and
/// documentation/homepage/repository.
fn check_metadata(pkg: &Package, config: &Config) -> CargoResult<()> {
    let md = pkg.manifest().metadata();

    let mut missing = vec![];

    // `lacking!(a || b, c)`: for each comma-separated group, if *every*
    // field in the group is unset or empty, push all the group's field
    // names (with `_` -> `-`) onto `missing`.
    macro_rules! lacking {
        ($( $($field: ident)||* ),*) => {{
            $(
                if $(md.$field.as_ref().map_or(true, |s| s.is_empty()))&&* {
                    $(missing.push(stringify!($field).replace("_", "-"));)*
                }
            )*
        }}
    }
    lacking!(
        description,
        license || license_file,
        documentation || homepage || repository
    );

    if !missing.is_empty() {
        // Join the names as "a, b or c".
        let mut things = missing[..missing.len() - 1].join(", ");
        if !things.is_empty() {
            things.push_str(" or ");
        }
        things.push_str(missing.last().unwrap());

        config.shell().warn(&format!(
            "manifest has no {things}.\n\
             See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.",
            things = things
        ))?
    }

    Ok(())
}
330
331fn verify_dependencies(pkg: &Package) -> CargoResult<()> {
333 for dep in pkg.dependencies() {
334 if dep.source_id().is_path() && !dep.specified_req() && dep.is_transitive() {
335 anyhow::bail!(
336 "all path dependencies must have a version specified \
337 when packaging.\ndependency `{}` does not specify \
338 a version.",
339 dep.name_in_toml()
340 )
341 }
342 }
343 Ok(())
344}
345
/// Discovers a git repository around `p` and, if one tracks the package's
/// `Cargo.toml`, checks that none of the packaged files have uncommitted
/// changes.
///
/// Returns `Some(HEAD sha1)` when the repo is clean, `None` when no usable
/// VCS state is found, and an error listing the dirty files otherwise.
fn check_repo_state(
    p: &Package,
    src_files: &[PathBuf],
    config: &Config,
) -> CargoResult<Option<String>> {
    if let Ok(repo) = git2::Repository::discover(p.root()) {
        if let Some(workdir) = repo.workdir() {
            debug!("found a git repo at {:?}", workdir);
            let path = p.manifest_path();
            let path = path.strip_prefix(workdir).unwrap_or(path);
            if let Ok(status) = repo.status_file(path) {
                // Only trust the repo if it doesn't *ignore* the package's
                // Cargo.toml.
                if (status & git2::Status::IGNORED).is_empty() {
                    debug!(
                        "found (git) Cargo.toml at {:?} in workdir {:?}",
                        path, workdir
                    );
                    return git(p, src_files, &repo);
                }
            }
            config.shell().verbose(|shell| {
                shell.warn(format!(
                    "No (git) Cargo.toml found at `{}` in workdir `{}`",
                    path.display(),
                    workdir.display()
                ))
            })?;
        }
    } else {
        config.shell().verbose(|shell| {
            shell.warn(format!("No (git) VCS found for `{}`", p.root().display()))
        })?;
    }

    // No usable git state: proceed without VCS info.
    return Ok(None);

    /// Errors if any packaged file is dirty; otherwise returns HEAD's sha1.
    fn git(
        p: &Package,
        src_files: &[PathBuf],
        repo: &git2::Repository,
    ) -> CargoResult<Option<String>> {
        let workdir = repo.workdir().unwrap();

        // Collect all submodule repos recursively; sort deepest workdir
        // path first so the innermost submodule claims a file.
        let mut sub_repos = Vec::new();
        open_submodules(repo, &mut sub_repos)?;
        sub_repos.sort_unstable_by(|a, b| b.0.as_os_str().len().cmp(&a.0.as_os_str().len()));
        // True when `path` belongs to a submodule and that submodule
        // reports a non-CURRENT status for it.
        let submodule_dirty = |path: &Path| -> bool {
            sub_repos
                .iter()
                .filter(|(sub_path, _sub_repo)| path.starts_with(sub_path))
                .any(|(sub_path, sub_repo)| {
                    let relative = path.strip_prefix(sub_path).unwrap();
                    sub_repo
                        .status_file(relative)
                        .map(|status| status != git2::Status::CURRENT)
                        .unwrap_or(false)
                })
        };

        let dirty = src_files
            .iter()
            .filter(|file| {
                let relative = file.strip_prefix(workdir).unwrap();
                if let Ok(status) = repo.status_file(relative) {
                    if status == git2::Status::CURRENT {
                        false
                    } else if relative.file_name().and_then(|s| s.to_str()).unwrap_or("")
                        == "Cargo.lock"
                    {
                        // A Cargo.lock with any non-IGNORED, non-CURRENT
                        // status counts as dirty; an ignored one does not.
                        status != git2::Status::IGNORED
                    } else {
                        true
                    }
                } else {
                    // The main repo couldn't report a status; the file may
                    // live in a submodule — check those instead.
                    submodule_dirty(file)
                }
            })
            .map(|path| {
                path.strip_prefix(p.root())
                    .unwrap_or(path)
                    .display()
                    .to_string()
            })
            .collect::<Vec<_>>();
        if dirty.is_empty() {
            let rev_obj = repo.revparse_single("HEAD")?;
            Ok(Some(rev_obj.id().to_string()))
        } else {
            anyhow::bail!(
                "{} files in the working directory contain changes that were \
                 not yet committed into git:\n\n{}\n\n\
                 to proceed despite this and include the uncommitted changes, pass the `--allow-dirty` flag",
                dirty.len(),
                dirty.join("\n")
            )
        }
    }

    /// Recursively opens every submodule under `repo`, collecting
    /// `(workdir, repository)` pairs into `sub_repos`. Submodules that fail
    /// to open are silently skipped.
    fn open_submodules(
        repo: &git2::Repository,
        sub_repos: &mut Vec<(PathBuf, git2::Repository)>,
    ) -> CargoResult<()> {
        for submodule in repo.submodules()? {
            if let Ok(sub_repo) = submodule.open() {
                open_submodules(&sub_repo, sub_repos)?;
                sub_repos.push((sub_repo.workdir().unwrap().to_owned(), sub_repo));
            }
        }
        Ok(())
    }
}
467
/// Streams `ar_files` into `dst` as a gzip-compressed tar archive, with
/// every entry placed under a `<name>-<version>/` top-level directory.
fn tar(
    ws: &Workspace<'_>,
    ar_files: Vec<ArchiveFile>,
    dst: &File,
    filename: &str,
) -> CargoResult<()> {
    // Record the archive's own filename in the gzip header.
    let filename = Path::new(filename);
    let encoder = GzBuilder::new()
        .filename(util::path2bytes(filename)?)
        .write(dst, Compression::best());

    let mut ar = Builder::new(encoder);
    let pkg = ws.current()?;
    let config = ws.config();

    // All entries live under `<name>-<version>/`.
    let base_name = format!("{}-{}", pkg.name(), pkg.version());
    let base_path = Path::new(&base_name);
    for ar_file in ar_files {
        let ArchiveFile {
            rel_path,
            rel_str,
            contents,
        } = ar_file;
        let ar_path = base_path.join(&rel_path);
        config
            .shell()
            .verbose(|shell| shell.status("Archiving", &rel_str))?;
        let mut header = Header::new_ustar();
        header
            .set_path(&ar_path)
            .chain_err(|| format!("failed to add to archive: `{}`", rel_str))?;
        match contents {
            FileContents::OnDisk(disk_path) => {
                // Copy the on-disk file, carrying over its metadata.
                let mut file = File::open(&disk_path).chain_err(|| {
                    format!("failed to open for archiving: `{}`", disk_path.display())
                })?;
                let metadata = file.metadata().chain_err(|| {
                    format!("could not learn metadata for: `{}`", disk_path.display())
                })?;
                header.set_metadata(&metadata);
                header.set_cksum();
                ar.append(&header, &mut file).chain_err(|| {
                    format!("could not archive source file `{}`", disk_path.display())
                })?;
            }
            FileContents::Generated(contents) => {
                // Synthesize metadata for generated entries: regular file,
                // mode 0644, mtime = now.
                header.set_entry_type(EntryType::file());
                header.set_mode(0o644);
                header.set_mtime(
                    SystemTime::now()
                        .duration_since(SystemTime::UNIX_EPOCH)
                        .unwrap()
                        .as_secs(),
                );
                header.set_size(contents.len() as u64);
                header.set_cksum();
                ar.append(&header, contents.as_bytes())
                    .chain_err(|| format!("could not archive source file `{}`", rel_str))?;
            }
        }
    }

    // Finalize the tar stream, then flush the gzip trailer.
    let encoder = ar.into_inner()?;
    encoder.finish()?;
    Ok(())
}
554
/// In verbose mode, emits a note for every package present in the newly
/// generated lockfile but not the original one, with a hint about what it
/// replaced (a previous version or an alternate source).
fn compare_resolve(
    config: &Config,
    current_pkg: &Package,
    orig_resolve: &Resolve,
    new_resolve: &Resolve,
) -> CargoResult<()> {
    if config.shell().verbosity() != Verbosity::Verbose {
        return Ok(());
    }
    let new_set: BTreeSet<PackageId> = new_resolve.iter().collect();
    let orig_set: BTreeSet<PackageId> = orig_resolve.iter().collect();
    let added = new_set.difference(&orig_set);
    let removed: Vec<&PackageId> = orig_set.difference(&new_set).collect();
    for pkg_id in added {
        // Skip the package being packaged itself.
        if pkg_id.name() == current_pkg.name() && pkg_id.version() == current_pkg.version() {
            continue;
        }
        // A removed entry with the same name *and* version indicates a
        // source change; same name only indicates a version change.
        let removed_candidates: Vec<&PackageId> = removed
            .iter()
            .filter(|orig_pkg_id| {
                orig_pkg_id.name() == pkg_id.name() && orig_pkg_id.version() == pkg_id.version()
            })
            .cloned()
            .collect();
        let extra = match removed_candidates.len() {
            0 => {
                // No same-version entry was removed; report any other
                // removed versions of the same package.
                let previous_versions: Vec<&PackageId> = removed
                    .iter()
                    .filter(|orig_pkg_id| orig_pkg_id.name() == pkg_id.name())
                    .cloned()
                    .collect();
                match previous_versions.len() {
                    0 => String::new(),
                    1 => format!(
                        ", previous version was `{}`",
                        previous_versions[0].version()
                    ),
                    _ => format!(
                        ", previous versions were: {}",
                        previous_versions
                            .iter()
                            .map(|pkg_id| format!("`{}`", pkg_id.version()))
                            .collect::<Vec<_>>()
                            .join(", ")
                    ),
                }
            }
            1 => {
                format!(
                    ", was originally sourced from `{}`",
                    removed_candidates[0].source_id()
                )
            }
            _ => {
                let comma_list = removed_candidates
                    .iter()
                    .map(|pkg_id| format!("`{}`", pkg_id.source_id()))
                    .collect::<Vec<_>>()
                    .join(", ");
                format!(
                    ", was originally sourced from one of these sources: {}",
                    comma_list
                )
            }
        };
        let msg = format!(
            "package `{}` added to the packaged Cargo.lock file{}",
            pkg_id, extra
        );
        config.shell().note(msg)?;
    }
    Ok(())
}
641
642fn check_yanked(config: &Config, pkg_set: &PackageSet<'_>, resolve: &Resolve) -> CargoResult<()> {
643 let _lock = config.acquire_package_cache_lock()?;
646
647 let mut sources = pkg_set.sources_mut();
648 for pkg_id in resolve.iter() {
649 if let Some(source) = sources.get_mut(pkg_id.source_id()) {
650 if source.is_yanked(pkg_id)? {
651 config.shell().warn(format!(
652 "package `{}` in Cargo.lock is yanked in registry `{}`, \
653 consider updating to a version that is not yanked",
654 pkg_id,
655 pkg_id.source_id().display_registry_name()
656 ))?;
657 }
658 }
659 }
660 Ok(())
661}
662
/// Verifies a freshly built `.crate` tarball: unpacks it next to the
/// archive, compiles it in an ephemeral workspace, and errors if the build
/// modified any of the unpacked source files.
fn run_verify(ws: &Workspace<'_>, tar: &FileLock, opts: &PackageOpts<'_>) -> CargoResult<()> {
    let config = ws.config();
    let pkg = ws.current()?;

    config.shell().status("Verifying", pkg)?;

    // Unpack the gzip'd tarball into `<dir>/<name>-<version>`, clearing any
    // leftovers from a previous run first.
    let f = GzDecoder::new(tar.file());
    let dst = tar
        .parent()
        .join(&format!("{}-{}", pkg.name(), pkg.version()));
    if dst.exists() {
        paths::remove_dir_all(&dst)?;
    }
    let mut archive = Archive::new(f);
    archive.set_preserve_mtime(false);
    archive.unpack(dst.parent().unwrap())?;

    // Fingerprint the unpacked sources before building so that any
    // post-build modification can be detected.
    let id = SourceId::for_path(&dst)?;
    let mut src = PathSource::new(&dst, id, ws.config());
    let new_pkg = src.root_package()?;
    let pkg_fingerprint = hash_all(&dst)?;
    let ws = Workspace::ephemeral(new_pkg, config, None, true)?;

    // When the `public-dependency` feature is enabled on the manifest,
    // pass an (empty) extra rustc-args vector; otherwise leave it None.
    let rustc_args = if pkg
        .manifest()
        .features()
        .require(Feature::public_dependency())
        .is_ok()
    {
        Some(vec![])
    } else {
        None
    };

    let exec: Arc<dyn Executor> = Arc::new(DefaultExecutor);
    ops::compile_with_exec(
        &ws,
        &ops::CompileOptions {
            build_config: BuildConfig::new(config, opts.jobs, &opts.target, CompileMode::Build)?,
            features: opts.features.clone(),
            no_default_features: opts.no_default_features,
            all_features: opts.all_features,
            spec: ops::Packages::Packages(Vec::new()),
            filter: ops::CompileFilter::Default {
                required_features_filterable: true,
            },
            deps_only: false,
            deps_remote_only: false,
            target_rustdoc_args: None,
            target_rustc_args: rustc_args,
            local_rustdoc_args: None,
            rustdoc_document_private_items: false,
            export_dir: None,
        },
        &exec,
    )?;

    // Fail if the build mutated anything in the unpacked source tree.
    let ws_fingerprint = hash_all(&dst)?;
    if pkg_fingerprint != ws_fingerprint {
        let changes = report_hash_difference(&pkg_fingerprint, &ws_fingerprint);
        anyhow::bail!(
            "Source directory was modified by build.rs during cargo publish. \
             Build scripts should not modify anything outside of OUT_DIR.\n\
             {}\n\n\
             To proceed despite this, pass the `--no-verify` flag.",
            changes
        )
    }

    Ok(())
}
741
/// Walks `path` (skipping the depth-1 `target` directory) and returns a map
/// from each entry's path to a content hash: file bytes for regular files,
/// the link target for symlinks, and a constant unit hash for directories.
fn hash_all(path: &Path) -> CargoResult<HashMap<PathBuf, u64>> {
    fn wrap(path: &Path) -> CargoResult<HashMap<PathBuf, u64>> {
        let mut result = HashMap::new();
        let walker = walkdir::WalkDir::new(path).into_iter();
        // Skip `<path>/target` — that's where build output lands.
        for entry in walker.filter_entry(|e| !(e.depth() == 1 && e.file_name() == "target")) {
            let entry = entry?;
            let file_type = entry.file_type();
            if file_type.is_file() {
                let file = File::open(entry.path())?;
                let hash = util::hex::hash_u64_file(&file)?;
                result.insert(entry.path().to_path_buf(), hash);
            } else if file_type.is_symlink() {
                // Hash the link target rather than following the link.
                let hash = util::hex::hash_u64(&fs::read_link(entry.path())?);
                result.insert(entry.path().to_path_buf(), hash);
            } else if file_type.is_dir() {
                // Record the directory's existence with a constant hash.
                let hash = util::hex::hash_u64(&());
                result.insert(entry.path().to_path_buf(), hash);
            }
        }
        Ok(result)
    }
    // Wrap the walk so any failure carries the root path in its context.
    let result = wrap(path).chain_err(|| format!("failed to verify output at {:?}", path))?;
    Ok(result)
}
766
/// Renders a human-readable summary of the differences between two
/// path -> hash fingerprints, as "Changed:", "Added:", and "Removed:"
/// sections (each sorted, entries separated by `\n\t`).
///
/// Panics if the two maps are identical; callers only invoke this after
/// detecting a difference.
fn report_hash_difference(orig: &HashMap<PathBuf, u64>, after: &HashMap<PathBuf, u64>) -> String {
    // Paths present in both maps whose hashes differ.
    let changed: Vec<String> = orig
        .iter()
        .filter(|(path, hash)| after.get(*path).map_or(false, |h| h != *hash))
        .map(|(path, _)| path.to_string_lossy().into_owned())
        .collect();
    // Paths only in `after` / only in `orig`.
    let added: Vec<String> = after
        .keys()
        .filter(|path| !orig.contains_key(*path))
        .map(|path| path.to_string_lossy().into_owned())
        .collect();
    let removed: Vec<String> = orig
        .keys()
        .filter(|path| !after.contains_key(*path))
        .map(|path| path.to_string_lossy().into_owned())
        .collect();

    // Format one non-empty group as "Label: a\n\tb\n\tc".
    fn section(label: &str, mut names: Vec<String>) -> Option<String> {
        if names.is_empty() {
            None
        } else {
            names.sort_unstable();
            Some(format!("{}: {}", label, names.join("\n\t")))
        }
    }

    let sections: Vec<String> = vec![
        section("Changed", changed),
        section("Added", added),
        section("Removed", removed),
    ]
    .into_iter()
    .flatten()
    .collect();
    assert!(!sections.is_empty(), "unexpected empty change detection");
    sections.join("\n")
}
801
802fn check_filename(file: &Path, shell: &mut Shell) -> CargoResult<()> {
809 let name = match file.file_name() {
810 Some(name) => name,
811 None => return Ok(()),
812 };
813 let name = match name.to_str() {
814 Some(name) => name,
815 None => anyhow::bail!(
816 "path does not have a unicode filename which may not unpack \
817 on all platforms: {}",
818 file.display()
819 ),
820 };
821 let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*'];
822 if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) {
823 anyhow::bail!(
824 "cannot package a filename with a special character `{}`: {}",
825 c,
826 file.display()
827 )
828 }
829 let mut check_windows = |name| -> CargoResult<()> {
830 if restricted_names::is_windows_reserved(name) {
831 shell.warn(format!(
832 "file {} is a reserved Windows filename, \
833 it will not work on Windows platforms",
834 file.display()
835 ))?;
836 }
837 Ok(())
838 };
839 for component in file.iter() {
840 if let Some(component) = component.to_str() {
841 check_windows(component)?;
842 }
843 }
844 if file.extension().is_some() {
845 if let Some(stem) = file.file_stem().and_then(|s| s.to_str()) {
846 check_windows(stem)?;
847 }
848 }
849 Ok(())
850}