use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::fs::{self, File};
use std::io::SeekFrom;
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use std::task::Poll;

use crate::core::PackageIdSpecQuery;
use crate::core::Shell;
use crate::core::Verbosity;
use crate::core::Workspace;
use crate::core::dependency::DepKind;
use crate::core::manifest::Target;
use crate::core::resolver::CliFeatures;
use crate::core::resolver::HasDevUnits;
use crate::core::{Package, PackageId, PackageSet, Resolve, SourceId};
use crate::ops::lockfile::LOCKFILE_NAME;
use crate::ops::registry::{RegistryOrIndex, infer_registry};
use crate::sources::path::PathEntry;
use crate::sources::registry::index::{IndexPackage, RegistryDependency};
use crate::sources::{CRATES_IO_REGISTRY, PathSource};
use crate::util::FileLock;
use crate::util::Filesystem;
use crate::util::GlobalContext;
use crate::util::Graph;
use crate::util::HumanBytes;
use crate::util::cache_lock::CacheLockMode;
use crate::util::context::JobsConfig;
use crate::util::errors::CargoResult;
use crate::util::errors::ManifestError;
use crate::util::restricted_names;
use crate::util::toml::prepare_for_publish;
use crate::{drop_println, ops};
use anyhow::{Context as _, bail};
use cargo_util::paths;
use cargo_util_schemas::messages;
use flate2::{Compression, GzBuilder};
use tar::{Builder, EntryType, Header, HeaderMode};
use tracing::debug;
use unicase::Ascii as UncasedAscii;

mod vcs;
mod verify;

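/// Output format for messages emitted by `cargo package`.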
#[derive(Debug, Clone)]
pub enum PackageMessageFormat {
    Human,
    Json,
}

impl PackageMessageFormat {
    pub const POSSIBLE_VALUES: [&str; 2] = ["human", "json"];

    pub const DEFAULT: &str = "human";
}

impl std::str::FromStr for PackageMessageFormat {
    type Err = anyhow::Error;

    fn from_str(s: &str) -> Result<PackageMessageFormat, anyhow::Error> {
        match s {
            "human" => Ok(PackageMessageFormat::Human),
            "json" => Ok(PackageMessageFormat::Json),
            f => bail!("unknown message format `{f}`"),
        }
    }
}

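/// Options for the `cargo package` operation.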
#[derive(Clone)]
pub struct PackageOpts<'gctx> {
    pub gctx: &'gctx GlobalContext,
    pub list: bool,
    pub fmt: PackageMessageFormat,
    pub check_metadata: bool,
    pub allow_dirty: bool,
    pub include_lockfile: bool,
    pub verify: bool,
    pub jobs: Option<JobsConfig>,
    pub keep_going: bool,
    pub to_package: ops::Packages,
    pub targets: Vec<String>,
    pub cli_features: CliFeatures,
    pub reg_or_index: Option<ops::RegistryOrIndex>,
    pub dry_run: bool,
}

const ORIGINAL_MANIFEST_FILE: &str = "Cargo.toml.orig";
const VCS_INFO_FILE: &str = ".cargo_vcs_info.json";

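/// A file to be included in the generated package archive.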
struct ArchiveFile {
    /// The path of the file in the archive, relative to the top-level
    /// `<name>-<version>` directory.
    rel_path: PathBuf,
    /// The same relative path as a string (guaranteed UTF-8).
    rel_str: String,
    /// Where the file's contents come from.
    contents: FileContents,
}

enum FileContents {
    /// An existing file on disk to copy into the archive.
    OnDisk(PathBuf),
    /// Contents that are generated while packaging.
    Generated(GeneratedFile),
}

enum GeneratedFile {
    /// The normalized `Cargo.toml` prepared for publishing; the path is the
    /// original manifest it is derived from.
    Manifest(PathBuf),
    /// The generated `Cargo.lock`; the path is the pre-existing workspace
    /// lockfile, if there is one.
    Lockfile(Option<PathBuf>),
    /// The `.cargo_vcs_info.json` file, generated from VCS state.
    VcsInfo(vcs::VcsInfo),
}

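/// Assembles the `.crate` tarball for `pkg` from the prepared `ar_files`,
/// writing it into the `package` output directory and returning a lock on
/// the finished file.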
#[tracing::instrument(skip_all)]
fn create_package(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'_>,
    pkg: &Package,
    ar_files: Vec<ArchiveFile>,
    local_reg: Option<&TmpRegistry<'_>>,
) -> CargoResult<FileLock> {
    let gctx = ws.gctx();
    let filecount = ar_files.len();

    for dep in pkg.dependencies() {
        super::check_dep_has_version(dep, false).map_err(|err| {
            ManifestError::new(
                err.context(format!(
                    "failed to verify manifest at `{}`",
                    pkg.manifest_path().display()
                )),
                pkg.manifest_path().into(),
            )
        })?;
    }

    let filename = pkg.package_id().tarball_name();
    let dir = ws.target_dir().join("package");
    let mut dst = {
        let tmp = format!(".{}", filename);
        dir.open_rw_exclusive_create(&tmp, gctx, "package scratch space")?
    };

    gctx.shell()
        .status("Packaging", pkg.package_id().to_string())?;
    dst.file().set_len(0)?;
    let uncompressed_size = tar(ws, opts, pkg, local_reg, ar_files, dst.file(), &filename)
        .context("failed to prepare local package for uploading")?;

    dst.seek(SeekFrom::Start(0))?;
    let src_path = dst.path();
    let dst_path = dst.parent().join(&filename);
    fs::rename(&src_path, &dst_path)
        .context("failed to move temporary tarball into final location")?;

    let dst_metadata = dst
        .file()
        .metadata()
        .with_context(|| format!("could not learn metadata for: `{}`", dst_path.display()))?;
    let compressed_size = dst_metadata.len();

    let uncompressed = HumanBytes(uncompressed_size);
    let compressed = HumanBytes(compressed_size);

    let message = format!("{filecount} files, {uncompressed:.1} ({compressed:.1} compressed)");
    drop(gctx.shell().status("Packaged", message));

    Ok(dst)
}

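/// Packages the workspace members selected by `opts` and returns the
/// produced `.crate` files.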
pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Vec<FileLock>> {
    let specs = &opts.to_package.to_package_id_specs(ws)?;
    if let ops::Packages::Packages(_) = opts.to_package {
        for spec in specs.iter() {
            let member_ids = ws.members().map(|p| p.package_id());
            spec.query(member_ids)?;
        }
    }
    let mut pkgs = ws.members_with_features(specs, &opts.cli_features)?;

    pkgs.retain(|(pkg, _feats)| specs.iter().any(|spec| spec.matches(pkg.package_id())));

    Ok(do_package(ws, opts, pkgs)?
        .into_iter()
        .map(|x| x.2)
        .collect())
}

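/// Packages the given members and returns the produced tarballs together
/// with the graph of path dependencies between them.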
pub(crate) fn package_with_dep_graph(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'_>,
    pkgs: Vec<(&Package, CliFeatures)>,
) -> CargoResult<LocalDependencies<(CliFeatures, FileLock)>> {
    let output = do_package(ws, opts, pkgs)?;

    Ok(local_deps(output.into_iter().map(
        |(pkg, opts, tarball)| (pkg, (opts.cli_features, tarball)),
    )))
}

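/// Shared implementation behind [`package`] and [`package_with_dep_graph`]:
/// packages each selected member in local dependency order, optionally just
/// listing contents, staging tarballs in a temporary registry overlay, and
/// verifying the results when requested.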
fn do_package<'a>(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'a>,
    pkgs: Vec<(&Package, CliFeatures)>,
) -> CargoResult<Vec<(Package, PackageOpts<'a>, FileLock)>> {
    if ws
        .lock_root()
        .as_path_unlocked()
        .join(LOCKFILE_NAME)
        .exists()
        && opts.include_lockfile
    {
        let dry_run = false;
        let _ = ops::resolve_ws(ws, dry_run)?;
    }

    let deps = local_deps(pkgs.iter().map(|(p, f)| ((*p).clone(), f.clone())));
    let just_pkgs: Vec<_> = pkgs.iter().map(|p| p.0).collect();

    let mut local_reg = {
        let sid = if (deps.has_dependencies() && (opts.include_lockfile || opts.verify))
            || opts.reg_or_index.is_some()
        {
            let sid = get_registry(ws.gctx(), &just_pkgs, opts.reg_or_index.clone())?;
            debug!("packaging for registry {}", sid);
            Some(sid)
        } else {
            None
        };
        let reg_dir = ws.build_dir().join("package").join("tmp-registry");
        sid.map(|sid| TmpRegistry::new(ws.gctx(), reg_dir, sid))
            .transpose()?
    };

    let sorted_pkgs = deps.sort();
    let mut outputs: Vec<(Package, PackageOpts<'_>, FileLock)> = Vec::new();
    for (pkg, cli_features) in sorted_pkgs {
        let opts = PackageOpts {
            cli_features: cli_features.clone(),
            to_package: ops::Packages::Default,
            ..opts.clone()
        };
        let ar_files = prepare_archive(ws, &pkg, &opts)?;

        if opts.list {
            match opts.fmt {
                PackageMessageFormat::Human => {
                    for ar_file in &ar_files {
                        drop_println!(ws.gctx(), "{}", ar_file.rel_str);
                    }
                }
                PackageMessageFormat::Json => {
                    let message = messages::PackageList {
                        id: pkg.package_id().to_spec(),
                        files: BTreeMap::from_iter(ar_files.into_iter().map(|f| {
                            let file = match f.contents {
                                FileContents::OnDisk(path) => messages::PackageFile::Copy { path },
                                FileContents::Generated(
                                    GeneratedFile::Manifest(path)
                                    | GeneratedFile::Lockfile(Some(path)),
                                ) => messages::PackageFile::Generate { path: Some(path) },
                                FileContents::Generated(
                                    GeneratedFile::VcsInfo(_) | GeneratedFile::Lockfile(None),
                                ) => messages::PackageFile::Generate { path: None },
                            };
                            (f.rel_path, file)
                        })),
                    };
                    let _ = ws.gctx().shell().print_json(&message);
                }
            }
        } else {
            let tarball = create_package(ws, &opts, &pkg, ar_files, local_reg.as_ref())?;
            if let Some(local_reg) = local_reg.as_mut() {
                if pkg.publish() != &Some(Vec::new()) {
                    local_reg.add_package(ws, &pkg, &tarball)?;
                }
            }
            outputs.push((pkg, opts, tarball));
        }
    }

    if opts.verify {
        for (pkg, opts, tarball) in &outputs {
            verify::run_verify(ws, pkg, tarball, local_reg.as_ref(), opts)
                .context("failed to verify package tarball")?
        }
    }

    Ok(outputs)
}

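/// Determines the registry [`SourceId`] being packaged for and checks it
/// against each package's `package.publish` allow-list.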
fn get_registry(
    gctx: &GlobalContext,
    pkgs: &[&Package],
    reg_or_index: Option<RegistryOrIndex>,
) -> CargoResult<SourceId> {
    let reg_or_index = match reg_or_index.clone() {
        Some(r) => Some(r),
        None => infer_registry(pkgs)?,
    };

    let reg = reg_or_index
        .clone()
        .unwrap_or_else(|| RegistryOrIndex::Registry(CRATES_IO_REGISTRY.to_owned()));
    if let RegistryOrIndex::Registry(reg_name) = reg {
        for pkg in pkgs {
            if let Some(allowed) = pkg.publish().as_ref() {
                if !allowed.is_empty() && !allowed.iter().any(|a| a == &reg_name) {
                    bail!(
                        "`{}` cannot be packaged.\n\
                         The registry `{}` is not listed in the `package.publish` value in Cargo.toml.",
                        pkg.name(),
                        reg_name
                    );
                }
            }
        }
    }
    Ok(ops::registry::get_source_id(gctx, reg_or_index.as_ref())?.replacement)
}

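/// The path dependencies among a set of local packages, along with a
/// caller-supplied payload of type `T` for each package.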
#[derive(Clone, Debug, Default)]
pub(crate) struct LocalDependencies<T> {
    pub packages: HashMap<PackageId, (Package, T)>,
    pub graph: Graph<PackageId, ()>,
}

impl<T: Clone> LocalDependencies<T> {
    pub fn sort(&self) -> Vec<(Package, T)> {
        self.graph
            .sort()
            .into_iter()
            .map(|name| self.packages[&name].clone())
            .collect()
    }

    pub fn has_dependencies(&self) -> bool {
        self.graph
            .iter()
            .any(|node| self.graph.edges(node).next().is_some())
    }
}

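/// Builds the graph of path dependencies between the given packages.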
fn local_deps<T>(packages: impl Iterator<Item = (Package, T)>) -> LocalDependencies<T> {
    let packages: HashMap<PackageId, (Package, T)> = packages
        .map(|(pkg, payload)| (pkg.package_id(), (pkg, payload)))
        .collect();

    // Dependencies only carry a source ID, so build a map from source ID to
    // package ID to find which local package a path dependency refers to.
    let source_to_pkg: HashMap<_, _> = packages
        .keys()
        .map(|pkg_id| (pkg_id.source_id(), *pkg_id))
        .collect();

    let mut graph = Graph::new();
    for (pkg, _payload) in packages.values() {
        graph.add(pkg.package_id());
        for dep in pkg.dependencies() {
            // Only path dependencies can point at other local packages.
            if !dep.source_id().is_path() {
                continue;
            }

            // Dev-dependencies without a version requirement are not included
            // in the published manifest, so they don't affect packaging order.
            if dep.kind() == DepKind::Development && !dep.specified_req() {
                continue;
            }

            // Skip dependencies on the package itself (same path source).
            if dep.source_id() == pkg.package_id().source_id() {
                continue;
            }

            if let Some(dep_pkg) = source_to_pkg.get(&dep.source_id()) {
                graph.link(pkg.package_id(), *dep_pkg);
            }
        }
    }

    LocalDependencies { packages, graph }
}

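/// Performs pre-archiving checks and produces the list of files to include
/// in the package archive for `pkg`.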
#[tracing::instrument(skip_all)]
fn prepare_archive(
    ws: &Workspace<'_>,
    pkg: &Package,
    opts: &PackageOpts<'_>,
) -> CargoResult<Vec<ArchiveFile>> {
    let gctx = ws.gctx();
    let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), gctx);
    src.load()?;

    if opts.check_metadata {
        check_metadata(pkg, gctx)?;
    }

    if !pkg.manifest().exclude().is_empty() && !pkg.manifest().include().is_empty() {
        gctx.shell().warn(
            "both package.include and package.exclude are specified; \
             the exclude list will be ignored",
        )?;
    }
    let src_files = src.list_files(pkg)?;

    let vcs_info = vcs::check_repo_state(pkg, &src_files, ws, &opts)?;

    build_ar_list(ws, pkg, src_files, vcs_info, opts.include_lockfile)
}

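/// Builds the final list of files to archive, adding the generated
/// `Cargo.toml`, `Cargo.toml.orig`, `Cargo.lock`, and `.cargo_vcs_info.json`
/// entries and validating `license-file`, `readme`, and build script paths.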
#[tracing::instrument(skip_all)]
fn build_ar_list(
    ws: &Workspace<'_>,
    pkg: &Package,
    src_files: Vec<PathEntry>,
    vcs_info: Option<vcs::VcsInfo>,
    include_lockfile: bool,
) -> CargoResult<Vec<ArchiveFile>> {
    let mut result = HashMap::new();
    let root = pkg.root();
    for src_file in &src_files {
        let rel_path = src_file.strip_prefix(&root)?;
        check_filename(rel_path, &mut ws.gctx().shell())?;
        let rel_str = rel_path.to_str().ok_or_else(|| {
            anyhow::format_err!("non-utf8 path in source directory: {}", rel_path.display())
        })?;
        match rel_str {
            "Cargo.lock" => continue,
            VCS_INFO_FILE | ORIGINAL_MANIFEST_FILE => anyhow::bail!(
                "invalid inclusion of reserved file name {} in package source",
                rel_str
            ),
            _ => {
                result
                    .entry(UncasedAscii::new(rel_str))
                    .or_insert_with(Vec::new)
                    .push(ArchiveFile {
                        rel_path: rel_path.to_owned(),
                        rel_str: rel_str.to_owned(),
                        contents: FileContents::OnDisk(src_file.to_path_buf()),
                    });
            }
        }
    }

    if result.remove(&UncasedAscii::new("Cargo.toml")).is_some() {
        result
            .entry(UncasedAscii::new(ORIGINAL_MANIFEST_FILE))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(ORIGINAL_MANIFEST_FILE),
                rel_str: ORIGINAL_MANIFEST_FILE.to_string(),
                contents: FileContents::OnDisk(pkg.manifest_path().to_owned()),
            });
        result
            .entry(UncasedAscii::new("Cargo.toml"))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from("Cargo.toml"),
                rel_str: "Cargo.toml".to_string(),
                contents: FileContents::Generated(GeneratedFile::Manifest(
                    pkg.manifest_path().to_owned(),
                )),
            });
    } else {
        ws.gctx().shell().warn(&format!(
            "no `Cargo.toml` file found when packaging `{}` (note the case of the file name).",
            pkg.name()
        ))?;
    }

    if include_lockfile {
        let lockfile_path = ws.lock_root().as_path_unlocked().join(LOCKFILE_NAME);
        let lockfile_path = lockfile_path.exists().then_some(lockfile_path);
        let rel_str = "Cargo.lock";
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::Lockfile(lockfile_path)),
            });
    }

    if let Some(vcs_info) = vcs_info {
        let rel_str = VCS_INFO_FILE;
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::VcsInfo(vcs_info)),
            });
    }

    let mut invalid_manifest_field: Vec<String> = vec![];

    let mut result = result.into_values().flatten().collect();
    if let Some(license_file) = &pkg.manifest().metadata().license_file {
        let license_path = Path::new(license_file);
        let abs_file_path = paths::normalize_path(&pkg.root().join(license_path));
        if abs_file_path.is_file() {
            check_for_file_and_add(
                "license-file",
                license_path,
                abs_file_path,
                pkg,
                &mut result,
                ws,
            )?;
        } else {
            error_on_nonexistent_file(
                &pkg,
                &license_path,
                "license-file",
                &mut invalid_manifest_field,
            );
        }
    }
    if let Some(readme) = &pkg.manifest().metadata().readme {
        let readme_path = Path::new(readme);
        let abs_file_path = paths::normalize_path(&pkg.root().join(readme_path));
        if abs_file_path.is_file() {
            check_for_file_and_add("readme", readme_path, abs_file_path, pkg, &mut result, ws)?;
        } else {
            error_on_nonexistent_file(&pkg, &readme_path, "readme", &mut invalid_manifest_field);
        }
    }

    if !invalid_manifest_field.is_empty() {
        return Err(anyhow::anyhow!(invalid_manifest_field.join("\n")));
    }

    for t in pkg
        .manifest()
        .targets()
        .iter()
        .filter(|t| t.is_custom_build())
    {
        if let Some(custom_build_path) = t.src_path().path() {
            let abs_custom_build_path =
                paths::normalize_path(&pkg.root().join(custom_build_path));
            if !abs_custom_build_path.is_file() || !abs_custom_build_path.starts_with(pkg.root())
            {
                error_custom_build_file_not_in_package(pkg, &abs_custom_build_path, t)?;
            }
        }
    }

    result.sort_unstable_by(|a, b| a.rel_path.cmp(&b.rel_path));

    Ok(result)
}

fn check_for_file_and_add(
    label: &str,
    file_path: &Path,
    abs_file_path: PathBuf,
    pkg: &Package,
    result: &mut Vec<ArchiveFile>,
    ws: &Workspace<'_>,
) -> CargoResult<()> {
    match abs_file_path.strip_prefix(&pkg.root()) {
        Ok(rel_file_path) => {
            if !result.iter().any(|ar| ar.rel_path == rel_file_path) {
                result.push(ArchiveFile {
                    rel_path: rel_file_path.to_path_buf(),
                    rel_str: rel_file_path
                        .to_str()
                        .expect("everything was utf8")
                        .to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
        Err(_) => {
            // The file lives outside of the package root.
            let file_name = file_path.file_name().unwrap();
            if result.iter().any(|ar| ar.rel_path == file_name) {
                ws.gctx().shell().warn(&format!(
                    "{} `{}` appears to be a path outside of the package, \
                     but there is already a file named `{}` in the root of the package. \
                     The archived crate will contain the copy in the root of the package. \
                     Update the {} to point to the path relative \
                     to the root of the package to remove this warning.",
                    label,
                    file_path.display(),
                    file_name.to_str().unwrap(),
                    label,
                ))?;
            } else {
                result.push(ArchiveFile {
                    rel_path: PathBuf::from(file_name),
                    rel_str: file_name.to_str().unwrap().to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
    }
    Ok(())
}

fn error_on_nonexistent_file(
    pkg: &Package,
    path: &Path,
    manifest_key_name: &'static str,
    invalid: &mut Vec<String>,
) {
    let rel_msg = if path.is_absolute() {
        "".to_string()
    } else {
        format!(" (relative to `{}`)", pkg.root().display())
    };

    let msg = format!(
        "{manifest_key_name} `{}` does not appear to exist{}.\n\
         Please update the {manifest_key_name} setting in the manifest at `{}`.",
        path.display(),
        rel_msg,
        pkg.manifest_path().display()
    );

    invalid.push(msg);
}

fn error_custom_build_file_not_in_package(
    pkg: &Package,
    path: &Path,
    target: &Target,
) -> CargoResult<Vec<ArchiveFile>> {
    let tip = {
        let description_name = target.description_named();
        if path.is_file() {
            format!(
                "the source file of {description_name} doesn't appear to be a path inside of the package.\n\
                 It is at `{}`, whereas the root of the package is `{}`.\n",
                path.display(),
                pkg.root().display()
            )
        } else {
            format!("the source file of {description_name} doesn't appear to exist.\n")
        }
    };
    let msg = format!(
        "{}\
         This may cause issues during packaging, as module resolution and resources included via macros are often relative to the path of source files.\n\
         Please update the `build` setting in the manifest at `{}` and point to a path inside the root of the package.",
        tip,
        pkg.manifest_path().display()
    );
    anyhow::bail!(msg)
}

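/// Re-resolves dependencies for the package being published and renders the
/// `Cargo.lock` contents to embed in the tarball.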
fn build_lock(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'_>,
    publish_pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
) -> CargoResult<String> {
    let gctx = ws.gctx();
    let mut orig_resolve = ops::load_pkg_lockfile(ws)?;

    let mut tmp_ws = Workspace::ephemeral(publish_pkg.clone(), ws.gctx(), None, true)?;

    if let Some(local_reg) = local_reg {
        tmp_ws.add_local_overlay(
            local_reg.upstream,
            local_reg.root.as_path_unlocked().to_owned(),
        );
        if opts.dry_run {
            if let Some(orig_resolve) = orig_resolve.as_mut() {
                let upstream_in_lock = if local_reg.upstream.is_crates_io() {
                    SourceId::crates_io(gctx)?
                } else {
                    local_reg.upstream
                };
                for (p, s) in local_reg.checksums() {
                    orig_resolve.set_checksum(p.with_source_id(upstream_in_lock), s.to_owned());
                }
            }
        }
    }
    let mut tmp_reg = tmp_ws.package_registry()?;

    let mut new_resolve = ops::resolve_with_previous(
        &mut tmp_reg,
        &tmp_ws,
        &CliFeatures::new_all(true),
        HasDevUnits::Yes,
        orig_resolve.as_ref(),
        None,
        &[],
        true,
    )?;

    let pkg_set = ops::get_resolved_packages(&new_resolve, tmp_reg)?;

    if let Some(orig_resolve) = orig_resolve {
        compare_resolve(gctx, tmp_ws.current()?, &orig_resolve, &new_resolve)?;
    }
    check_yanked(
        gctx,
        &pkg_set,
        &new_resolve,
        "consider updating to a version that is not yanked",
    )?;

    ops::resolve_to_string(&tmp_ws, &mut new_resolve)
}

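/// Warns about manifest metadata that is commonly expected for published
/// packages (description, license, documentation/homepage/repository).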
fn check_metadata(pkg: &Package, gctx: &GlobalContext) -> CargoResult<()> {
    let md = pkg.manifest().metadata();

    let mut missing = vec![];

    macro_rules! lacking {
        ($( $($field: ident)||* ),*) => {{
            $(
                if $(md.$field.as_ref().map_or(true, |s| s.is_empty()))&&* {
                    $(missing.push(stringify!($field).replace("_", "-"));)*
                }
            )*
        }}
    }
    lacking!(
        description,
        license || license_file,
        documentation || homepage || repository
    );

    if !missing.is_empty() {
        let mut things = missing[..missing.len() - 1].join(", ");
        if !things.is_empty() {
            things.push_str(" or ");
        }
        things.push_str(missing.last().unwrap());

        gctx.shell().warn(&format!(
            "manifest has no {things}.\n\
             See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.",
            things = things
        ))?
    }

    Ok(())
}

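/// Writes the gzip-compressed tar archive to `dst` and returns the total
/// uncompressed size of its contents in bytes.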
fn tar(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'_>,
    pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
    ar_files: Vec<ArchiveFile>,
    dst: &File,
    filename: &str,
) -> CargoResult<u64> {
    let filename = Path::new(filename);
    let encoder = GzBuilder::new()
        .filename(paths::path2bytes(filename)?)
        .write(dst, Compression::best());

    let mut ar = Builder::new(encoder);
    ar.sparse(false);
    let gctx = ws.gctx();

    let base_name = format!("{}-{}", pkg.name(), pkg.version());
    let base_path = Path::new(&base_name);
    let included = ar_files
        .iter()
        .map(|ar_file| ar_file.rel_path.clone())
        .collect::<Vec<_>>();
    let publish_pkg = prepare_for_publish(pkg, ws, Some(&included))?;

    let mut uncompressed_size = 0;
    for ar_file in ar_files {
        let ArchiveFile {
            rel_path,
            rel_str,
            contents,
        } = ar_file;
        let ar_path = base_path.join(&rel_path);
        gctx.shell()
            .verbose(|shell| shell.status("Archiving", &rel_str))?;
        let mut header = Header::new_gnu();
        match contents {
            FileContents::OnDisk(disk_path) => {
                let mut file = File::open(&disk_path).with_context(|| {
                    format!("failed to open for archiving: `{}`", disk_path.display())
                })?;
                let metadata = file.metadata().with_context(|| {
                    format!("could not learn metadata for: `{}`", disk_path.display())
                })?;
                header.set_metadata_in_mode(&metadata, HeaderMode::Deterministic);
                header.set_cksum();
                ar.append_data(&mut header, &ar_path, &mut file)
                    .with_context(|| {
                        format!("could not archive source file `{}`", disk_path.display())
                    })?;
                uncompressed_size += metadata.len() as u64;
            }
            FileContents::Generated(generated_kind) => {
                let contents = match generated_kind {
                    GeneratedFile::Manifest(_) => {
                        publish_pkg.manifest().to_normalized_contents()?
                    }
                    GeneratedFile::Lockfile(_) => build_lock(ws, opts, &publish_pkg, local_reg)?,
                    GeneratedFile::VcsInfo(ref s) => serde_json::to_string_pretty(s)?,
                };
                header.set_entry_type(EntryType::file());
                header.set_mode(0o644);
                header.set_size(contents.len() as u64);
                // Use a fixed, non-zero mtime so the generated archive is deterministic.
                header.set_mtime(1);
                header.set_cksum();
                ar.append_data(&mut header, &ar_path, contents.as_bytes())
                    .with_context(|| format!("could not archive source file `{}`", rel_str))?;
                uncompressed_size += contents.len() as u64;
            }
        }
    }

    let encoder = ar.into_inner()?;
    encoder.finish()?;
    Ok(uncompressed_size)
}

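/// In verbose mode, notes any packages that appear in the newly generated
/// lockfile but not in the workspace's original one.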
fn compare_resolve(
    gctx: &GlobalContext,
    current_pkg: &Package,
    orig_resolve: &Resolve,
    new_resolve: &Resolve,
) -> CargoResult<()> {
    if gctx.shell().verbosity() != Verbosity::Verbose {
        return Ok(());
    }
    let new_set: BTreeSet<PackageId> = new_resolve.iter().collect();
    let orig_set: BTreeSet<PackageId> = orig_resolve.iter().collect();
    let added = new_set.difference(&orig_set);
    let removed: Vec<&PackageId> = orig_set.difference(&new_set).collect();
    for pkg_id in added {
        if pkg_id.name() == current_pkg.name() && pkg_id.version() == current_pkg.version() {
            continue;
        }
        // Look for removed entries with the same name and version (differing
        // only in source), which usually explains why the new entry was added.
        let removed_candidates: Vec<&PackageId> = removed
            .iter()
            .filter(|orig_pkg_id| {
                orig_pkg_id.name() == pkg_id.name() && orig_pkg_id.version() == pkg_id.version()
            })
            .cloned()
            .collect();
        let extra = match removed_candidates.len() {
            0 => {
                // No entry with the same name and version was removed; look for
                // other versions of this package that were.
                let previous_versions: Vec<&PackageId> = removed
                    .iter()
                    .filter(|orig_pkg_id| orig_pkg_id.name() == pkg_id.name())
                    .cloned()
                    .collect();
                match previous_versions.len() {
                    0 => String::new(),
                    1 => format!(
                        ", previous version was `{}`",
                        previous_versions[0].version()
                    ),
                    _ => format!(
                        ", previous versions were: {}",
                        previous_versions
                            .iter()
                            .map(|pkg_id| format!("`{}`", pkg_id.version()))
                            .collect::<Vec<_>>()
                            .join(", ")
                    ),
                }
            }
            1 => {
                format!(
                    ", was originally sourced from `{}`",
                    removed_candidates[0].source_id()
                )
            }
            _ => {
                // Multiple removed entries matched; list all of their sources.
                let comma_list = removed_candidates
                    .iter()
                    .map(|pkg_id| format!("`{}`", pkg_id.source_id()))
                    .collect::<Vec<_>>()
                    .join(", ");
                format!(
                    ", was originally sourced from one of these sources: {}",
                    comma_list
                )
            }
        };
        let msg = format!(
            "package `{}` added to the packaged Cargo.lock file{}",
            pkg_id, extra
        );
        gctx.shell().note(msg)?;
    }
    Ok(())
}

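/// Warns about any packages in the resolve that have been yanked from their
/// registry.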
pub fn check_yanked(
    gctx: &GlobalContext,
    pkg_set: &PackageSet<'_>,
    resolve: &Resolve,
    hint: &str,
) -> CargoResult<()> {
    // Checking yanked status may query and update the registry cache, so take
    // the package cache lock first.
    let _lock = gctx.acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?;

    let mut sources = pkg_set.sources_mut();
    let mut pending: Vec<PackageId> = resolve.iter().collect();
    let mut results = Vec::new();
    for (_id, source) in sources.sources_mut() {
        source.invalidate_cache();
    }
    while !pending.is_empty() {
        pending.retain(|pkg_id| {
            if let Some(source) = sources.get_mut(pkg_id.source_id()) {
                match source.is_yanked(*pkg_id) {
                    Poll::Ready(result) => results.push((*pkg_id, result)),
                    Poll::Pending => return true,
                }
            }
            false
        });
        for (_id, source) in sources.sources_mut() {
            source.block_until_ready()?;
        }
    }

    for (pkg_id, is_yanked) in results {
        if is_yanked? {
            gctx.shell().warn(format!(
                "package `{}` in Cargo.lock is yanked in registry `{}`, {}",
                pkg_id,
                pkg_id.source_id().display_registry_name(),
                hint
            ))?;
        }
    }
    Ok(())
}

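/// Rejects file names that cannot be packaged portably (non-Unicode names or
/// names containing special characters) and warns about reserved Windows
/// file names.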
fn check_filename(file: &Path, shell: &mut Shell) -> CargoResult<()> {
    let Some(name) = file.file_name() else {
        return Ok(());
    };
    let Some(name) = name.to_str() else {
        anyhow::bail!(
            "path does not have a unicode filename which may not unpack \
             on all platforms: {}",
            file.display()
        )
    };
    let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*'];
    if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) {
        anyhow::bail!(
            "cannot package a filename with a special character `{}`: {}",
            c,
            file.display()
        )
    }
    if restricted_names::is_windows_reserved_path(file) {
        shell.warn(format!(
            "file {} is a reserved Windows filename, \
             it will not work on Windows platforms",
            file.display()
        ))?;
    }
    Ok(())
}

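/// A temporary local registry laid out as an index plus `.crate` files, used
/// as an overlay over the upstream registry when locking and verifying
/// packages whose local dependencies have not been published yet.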
struct TmpRegistry<'a> {
    gctx: &'a GlobalContext,
    upstream: SourceId,
    root: Filesystem,
    checksums: HashMap<PackageId, String>,
    _lock: FileLock,
}

impl<'a> TmpRegistry<'a> {
    fn new(gctx: &'a GlobalContext, root: Filesystem, upstream: SourceId) -> CargoResult<Self> {
        root.create_dir()?;
        let _lock = root.open_rw_exclusive_create(".cargo-lock", gctx, "temporary registry")?;
        let slf = Self {
            gctx,
            root,
            upstream,
            checksums: HashMap::new(),
            _lock,
        };
        // If there's a stale index from a previous run, delete it.
        let index_path = slf.index_path().into_path_unlocked();
        if index_path.exists() {
            paths::remove_dir_all(index_path)?;
        }
        slf.index_path().create_dir()?;
        Ok(slf)
    }

    fn index_path(&self) -> Filesystem {
        self.root.join("index")
    }

    fn add_package(
        &mut self,
        ws: &Workspace<'_>,
        package: &Package,
        tar: &FileLock,
    ) -> CargoResult<()> {
        debug!(
            "adding package {}@{} to local overlay at {}",
            package.name(),
            package.version(),
            self.root.as_path_unlocked().display()
        );
        {
            let mut tar_copy = self.root.open_rw_exclusive_create(
                package.package_id().tarball_name(),
                self.gctx,
                "temporary package registry",
            )?;
            tar.file().seek(SeekFrom::Start(0))?;
            std::io::copy(&mut tar.file(), &mut tar_copy)?;
            tar_copy.flush()?;
        }

        let new_crate = super::registry::prepare_transmit(self.gctx, ws, package, self.upstream)?;

        tar.file().seek(SeekFrom::Start(0))?;
        let cksum = cargo_util::Sha256::new()
            .update_file(tar.file())?
            .finish_hex();

        self.checksums.insert(package.package_id(), cksum.clone());

        let deps: Vec<_> = new_crate
            .deps
            .into_iter()
            .map(|dep| {
                let name = dep
                    .explicit_name_in_toml
                    .clone()
                    .unwrap_or_else(|| dep.name.clone())
                    .into();
                let package = dep
                    .explicit_name_in_toml
                    .as_ref()
                    .map(|_| dep.name.clone().into());
                RegistryDependency {
                    name,
                    req: dep.version_req.into(),
                    features: dep.features.into_iter().map(|x| x.into()).collect(),
                    optional: dep.optional,
                    default_features: dep.default_features,
                    target: dep.target.map(|x| x.into()),
                    kind: Some(dep.kind.into()),
                    registry: dep.registry.map(|x| x.into()),
                    package,
                    public: None,
                    artifact: dep
                        .artifact
                        .map(|xs| xs.into_iter().map(|x| x.into()).collect()),
                    bindep_target: dep.bindep_target.map(|x| x.into()),
                    lib: dep.lib,
                }
            })
            .collect();

        let index_line = serde_json::to_string(&IndexPackage {
            name: new_crate.name.into(),
            vers: package.version().clone(),
            deps,
            features: new_crate
                .features
                .into_iter()
                .map(|(k, v)| (k.into(), v.into_iter().map(|x| x.into()).collect()))
                .collect(),
            features2: None,
            cksum,
            yanked: None,
            links: new_crate.links.map(|x| x.into()),
            rust_version: None,
            v: Some(2),
        })?;

        let file =
            cargo_util::registry::make_dep_path(&package.name().as_str().to_lowercase(), false);
        let mut dst = self.index_path().open_rw_exclusive_create(
            file,
            self.gctx,
            "temporary package registry",
        )?;
        dst.write_all(index_line.as_bytes())?;
        Ok(())
    }

    fn checksums(&self) -> impl Iterator<Item = (PackageId, &str)> {
        self.checksums.iter().map(|(p, s)| (*p, s.as_str()))
    }
}