1use std::collections::BTreeMap;
2use std::collections::BTreeSet;
3use std::collections::HashMap;
4use std::fs::File;
5use std::io::SeekFrom;
6use std::io::prelude::*;
7use std::path::{Path, PathBuf};
8use std::task::Poll;
9
10use crate::core::PackageIdSpecQuery;
11use crate::core::Shell;
12use crate::core::Verbosity;
13use crate::core::Workspace;
14use crate::core::dependency::DepKind;
15use crate::core::manifest::Target;
16use crate::core::resolver::CliFeatures;
17use crate::core::resolver::HasDevUnits;
18use crate::core::{Package, PackageId, PackageSet, Resolve, SourceId};
19use crate::ops::lockfile::LOCKFILE_NAME;
20use crate::ops::registry::{RegistryOrIndex, infer_registry};
21use crate::sources::path::PathEntry;
22use crate::sources::{CRATES_IO_REGISTRY, PathSource};
23use crate::util::FileLock;
24use crate::util::Filesystem;
25use crate::util::GlobalContext;
26use crate::util::Graph;
27use crate::util::HumanBytes;
28use crate::util::cache_lock::CacheLockMode;
29use crate::util::context::JobsConfig;
30use crate::util::errors::CargoResult;
31use crate::util::errors::ManifestError;
32use crate::util::restricted_names;
33use crate::util::toml::prepare_for_publish;
34use crate::{drop_println, ops};
35use annotate_snippets::Level;
36use anyhow::{Context as _, bail};
37use cargo_util::paths;
38use cargo_util_schemas::index::{IndexPackage, RegistryDependency};
39use cargo_util_schemas::messages;
40use flate2::{Compression, GzBuilder};
41use tar::{Builder, EntryType, Header, HeaderMode};
42use tracing::debug;
43use unicase::Ascii as UncasedAscii;
44
45mod vcs;
46mod verify;
47
/// Output format for `cargo package --list`.
#[derive(Debug, Clone)]
pub enum PackageMessageFormat {
    /// Plain per-line listing of archived file names.
    Human,
    /// Machine-readable JSON message per package.
    Json,
}
56
impl PackageMessageFormat {
    /// Accepted values for the message-format CLI flag.
    pub const POSSIBLE_VALUES: [&str; 2] = ["human", "json"];

    /// Format used when the flag is not specified.
    pub const DEFAULT: &str = "human";
}
62
63impl std::str::FromStr for PackageMessageFormat {
64 type Err = anyhow::Error;
65
66 fn from_str(s: &str) -> Result<PackageMessageFormat, anyhow::Error> {
67 match s {
68 "human" => Ok(PackageMessageFormat::Human),
69 "json" => Ok(PackageMessageFormat::Json),
70 f => bail!("unknown message format `{f}`"),
71 }
72 }
73}
74
/// Options controlling how `cargo package` assembles `.crate` files.
#[derive(Clone)]
pub struct PackageOpts<'gctx> {
    /// Global configuration and shell.
    pub gctx: &'gctx GlobalContext,
    /// Only report the files that would be packaged; build no tarball.
    pub list: bool,
    /// Output format used for `--list`.
    pub fmt: PackageMessageFormat,
    /// Warn about missing manifest metadata (see `check_metadata`).
    pub check_metadata: bool,
    /// Allow a dirty VCS state; consumed by `vcs::check_repo_state` via these opts.
    pub allow_dirty: bool,
    /// Include a `Cargo.lock` entry in the archive.
    pub include_lockfile: bool,
    /// Build the packaged crate afterwards to verify it (`verify::run_verify`).
    pub verify: bool,
    /// Job count forwarded to the verification build — see `verify` module.
    pub jobs: Option<JobsConfig>,
    /// Keep-going flag forwarded to the verification build — see `verify` module.
    pub keep_going: bool,
    /// Which workspace packages to package.
    pub to_package: ops::Packages,
    /// Target platforms forwarded to the verification build — see `verify` module.
    pub targets: Vec<String>,
    /// Feature selection to package with.
    pub cli_features: CliFeatures,
    /// Destination registry or index, when explicitly specified.
    pub reg_or_index: Option<ops::RegistryOrIndex>,
    /// Dry-run mode; affects lockfile checksum handling in `build_lock`.
    pub dry_run: bool,
}
104
/// Archive name under which the original (pre-normalization) manifest is kept.
const ORIGINAL_MANIFEST_FILE: &str = "Cargo.toml.orig";
/// Archive name of the generated VCS status file.
const VCS_INFO_FILE: &str = ".cargo_vcs_info.json";
107
/// A single entry destined for the package archive.
struct ArchiveFile {
    /// Path of the file inside the archive, relative to the package root.
    rel_path: PathBuf,
    /// Same as `rel_path`, already validated as UTF-8.
    rel_str: String,
    /// Where this entry's bytes come from.
    contents: FileContents,
}
117
/// Source of an archive entry's contents.
enum FileContents {
    /// Copied verbatim from this on-disk path.
    OnDisk(PathBuf),
    /// Produced by cargo at packaging time.
    Generated(GeneratedFile),
}
124
/// The kinds of files cargo generates while packaging.
enum GeneratedFile {
    /// Normalized `Cargo.toml`; the path of the original manifest it derives from.
    Manifest(PathBuf),
    /// `Cargo.lock`; `Some` holds the workspace lockfile path when one exists.
    Lockfile(Option<PathBuf>),
    /// Contents of `.cargo_vcs_info.json`.
    VcsInfo(vcs::VcsInfo),
}
137
138#[tracing::instrument(skip_all)]
140fn create_package(
141 ws: &Workspace<'_>,
142 opts: &PackageOpts<'_>,
143 pkg: &Package,
144 ar_files: Vec<ArchiveFile>,
145 local_reg: Option<&TmpRegistry<'_>>,
146) -> CargoResult<FileLock> {
147 let gctx = ws.gctx();
148 let filecount = ar_files.len();
149
150 for dep in pkg.dependencies() {
152 super::check_dep_has_version(dep, false).map_err(|err| {
153 ManifestError::new(
154 err.context(format!(
155 "failed to verify manifest at `{}`",
156 pkg.manifest_path().display()
157 )),
158 pkg.manifest_path().into(),
159 )
160 })?;
161 }
162
163 let filename = pkg.package_id().tarball_name();
164 let dir = ws.build_dir().join("package");
165 let mut dst = {
166 let tmp = format!(".{}", filename);
167 dir.open_rw_exclusive_create(&tmp, gctx, "package scratch space")?
168 };
169
170 gctx.shell()
175 .status("Packaging", pkg.package_id().to_string())?;
176 dst.file().set_len(0)?;
177 let uncompressed_size = tar(ws, opts, pkg, local_reg, ar_files, dst.file(), &filename)
178 .context("failed to prepare local package for uploading")?;
179
180 dst.seek(SeekFrom::Start(0))?;
181 let dst_path = dst.parent().join(&filename);
182 dst.rename(&dst_path)?;
183
184 let dst_metadata = dst
185 .file()
186 .metadata()
187 .with_context(|| format!("could not learn metadata for: `{}`", dst_path.display()))?;
188 let compressed_size = dst_metadata.len();
189
190 let uncompressed = HumanBytes(uncompressed_size);
191 let compressed = HumanBytes(compressed_size);
192
193 let message = format!("{filecount} files, {uncompressed:.1} ({compressed:.1} compressed)");
194 drop(gctx.shell().status("Packaged", message));
196
197 return Ok(dst);
198}
199
/// Packages the requested workspace members and returns locks on the
/// resulting `.crate` files, copying them into `<target>/package` when the
/// target and build directories differ.
pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Vec<FileLock>> {
    let specs = &opts.to_package.to_package_id_specs(ws)?;
    // For explicitly requested packages, error early on specs that match no
    // workspace member.
    if let ops::Packages::Packages(_) = opts.to_package {
        for spec in specs.iter() {
            let member_ids = ws.members().map(|p| p.package_id());
            spec.query(member_ids)?;
        }
    }
    let mut pkgs = ws.members_with_features(specs, &opts.cli_features)?;

    // Keep only the members actually matched by the specs.
    pkgs.retain(|(pkg, _feats)| specs.iter().any(|spec| spec.matches(pkg.package_id())));

    let packaged = do_package(ws, opts, pkgs)?;

    let mut result = Vec::new();
    let target_dir = ws.target_dir();
    let build_dir = ws.build_dir();
    if target_dir == build_dir {
        // Tarballs were already created at the final location.
        result.extend(packaged.into_iter().map(|(_, _, src)| src));
    } else {
        // Uplift: copy each tarball from the build dir to `<target>/package`.
        let artifact_dir = target_dir.join("package");
        for (pkg, _, src) in packaged {
            let filename = pkg.package_id().tarball_name();
            let dst =
                artifact_dir.open_rw_exclusive_create(filename, ws.gctx(), "uplifted package")?;
            src.file().seek(SeekFrom::Start(0))?;
            std::io::copy(&mut src.file(), &mut dst.file())?;
            result.push(dst);
        }
    }

    Ok(result)
}
241
242pub(crate) fn package_with_dep_graph(
248 ws: &Workspace<'_>,
249 opts: &PackageOpts<'_>,
250 pkgs: Vec<(&Package, CliFeatures)>,
251) -> CargoResult<LocalDependencies<(CliFeatures, FileLock)>> {
252 let output = do_package(ws, opts, pkgs)?;
253
254 Ok(local_deps(output.into_iter().map(
255 |(pkg, opts, tarball)| (pkg, (opts.cli_features, tarball)),
256 )))
257}
258
/// Shared implementation for [`package`] and [`package_with_dep_graph`]:
/// lists or builds a tarball for each requested package, optionally adding
/// each finished tarball to a temporary overlay registry so that later
/// packages (and verification builds) can resolve against it.
fn do_package<'a>(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'a>,
    pkgs: Vec<(&Package, CliFeatures)>,
) -> CargoResult<Vec<(Package, PackageOpts<'a>, FileLock)>> {
    // When an existing lockfile will be shipped in the tarball, refresh the
    // workspace resolve first so the packaged lockfile is current.
    if ws
        .lock_root()
        .as_path_unlocked()
        .join(LOCKFILE_NAME)
        .exists()
        && opts.include_lockfile
    {
        let dry_run = false;
        let _ = ops::resolve_ws(ws, dry_run)?;
    }

    // Graph of local path dependencies among the packages being packaged.
    let deps = local_deps(pkgs.iter().map(|(p, f)| ((*p).clone(), f.clone())));
    let just_pkgs: Vec<_> = pkgs.iter().map(|p| p.0).collect();

    // An overlay registry is only needed when inter-package path deps must be
    // resolvable (lockfile generation or verify) or a registry was pinned.
    let mut local_reg = {
        let sid = if (deps.has_dependencies() && (opts.include_lockfile || opts.verify))
            || opts.reg_or_index.is_some()
        {
            let sid = get_registry(ws.gctx(), &just_pkgs, opts.reg_or_index.clone())?;
            debug!("packaging for registry {}", sid);
            Some(sid)
        } else {
            None
        };
        let reg_dir = ws.build_dir().join("package").join("tmp-registry");
        sid.map(|sid| TmpRegistry::new(ws.gctx(), reg_dir, sid))
            .transpose()?
    };

    // Process packages in the order given by the dependency graph so each
    // package's local dependencies are already in the overlay registry.
    let sorted_pkgs = deps.sort();
    let mut outputs: Vec<(Package, PackageOpts<'_>, FileLock)> = Vec::new();
    for (pkg, cli_features) in sorted_pkgs {
        // Per-package opts: pin this package's feature selection.
        let opts = PackageOpts {
            cli_features: cli_features.clone(),
            to_package: ops::Packages::Default,
            ..opts.clone()
        };
        let ar_files = prepare_archive(ws, &pkg, &opts)?;

        if opts.list {
            // `--list`: report what would be packaged instead of building.
            match opts.fmt {
                PackageMessageFormat::Human => {
                    for ar_file in &ar_files {
                        drop_println!(ws.gctx(), "{}", ar_file.rel_str);
                    }
                }
                PackageMessageFormat::Json => {
                    let message = messages::PackageList {
                        id: pkg.package_id().to_spec(),
                        files: BTreeMap::from_iter(ar_files.into_iter().map(|f| {
                            let file = match f.contents {
                                FileContents::OnDisk(path) => messages::PackageFile::Copy { path },
                                FileContents::Generated(
                                    GeneratedFile::Manifest(path)
                                    | GeneratedFile::Lockfile(Some(path)),
                                ) => messages::PackageFile::Generate { path: Some(path) },
                                FileContents::Generated(
                                    GeneratedFile::VcsInfo(_) | GeneratedFile::Lockfile(None),
                                ) => messages::PackageFile::Generate { path: None },
                            };
                            (f.rel_path, file)
                        })),
                    };
                    let _ = ws.gctx().shell().print_json(&message);
                }
            }
        } else {
            let tarball = create_package(ws, &opts, &pkg, ar_files, local_reg.as_ref())?;
            if let Some(local_reg) = local_reg.as_mut() {
                // `publish = []` disables publishing; keep such packages out
                // of the overlay registry.
                if pkg.publish() != &Some(Vec::new()) {
                    local_reg.add_package(ws, &pkg, &tarball)?;
                }
            }
            outputs.push((pkg, opts, tarball));
        }
    }

    // Verify only after everything is packaged, so path dependencies are all
    // present in the overlay registry.
    if opts.verify {
        for (pkg, opts, tarball) in &outputs {
            verify::run_verify(ws, pkg, tarball, local_reg.as_ref(), opts)
                .context("failed to verify package tarball")?
        }
    }

    Ok(outputs)
}
363
364fn get_registry(
371 gctx: &GlobalContext,
372 pkgs: &[&Package],
373 reg_or_index: Option<RegistryOrIndex>,
374) -> CargoResult<SourceId> {
375 let reg_or_index = match reg_or_index.clone() {
376 Some(r) => Some(r),
377 None => infer_registry(pkgs)?,
378 };
379
380 let reg = reg_or_index
382 .clone()
383 .unwrap_or_else(|| RegistryOrIndex::Registry(CRATES_IO_REGISTRY.to_owned()));
384 if let RegistryOrIndex::Registry(reg_name) = reg {
385 for pkg in pkgs {
386 if let Some(allowed) = pkg.publish().as_ref() {
387 if !allowed.is_empty() && !allowed.iter().any(|a| a == ®_name) {
391 bail!(
392 "`{}` cannot be packaged.\n\
393 The registry `{}` is not listed in the `package.publish` value in Cargo.toml.",
394 pkg.name(),
395 reg_name
396 );
397 }
398 }
399 }
400 }
401 Ok(ops::registry::get_source_id(gctx, reg_or_index.as_ref())?.replacement)
402}
403
/// The graph of local path dependencies among a set of packages, with an
/// arbitrary payload `T` attached to each package.
#[derive(Clone, Debug, Default)]
pub(crate) struct LocalDependencies<T> {
    /// Package and payload, keyed by package id.
    pub packages: HashMap<PackageId, (Package, T)>,
    /// Edges link a package to the local packages it depends on.
    pub graph: Graph<PackageId, ()>,
}
410
411impl<T: Clone> LocalDependencies<T> {
412 pub fn sort(&self) -> Vec<(Package, T)> {
413 self.graph
414 .sort()
415 .into_iter()
416 .map(|name| self.packages[&name].clone())
417 .collect()
418 }
419
420 pub fn has_dependencies(&self) -> bool {
421 self.graph
422 .iter()
423 .any(|node| self.graph.edges(node).next().is_some())
424 }
425}
426
/// Builds a [`LocalDependencies`] from `packages`: nodes are the packages
/// themselves; edges are the path dependencies between them.
fn local_deps<T>(packages: impl Iterator<Item = (Package, T)>) -> LocalDependencies<T> {
    let packages: HashMap<PackageId, (Package, T)> = packages
        .map(|(pkg, payload)| (pkg.package_id(), (pkg, payload)))
        .collect();

    // Path dependencies are matched to local packages by source id (a path
    // dependency's source id points at the dependency's directory).
    let source_to_pkg: HashMap<_, _> = packages
        .keys()
        .map(|pkg_id| (pkg_id.source_id(), *pkg_id))
        .collect();

    let mut graph = Graph::new();
    for (pkg, _payload) in packages.values() {
        graph.add(pkg.package_id());
        for dep in pkg.dependencies() {
            // Only path dependencies can refer to other local packages.
            if !dep.source_id().is_path() {
                continue;
            }

            // Dev-dependencies without a version requirement do not force a
            // packaging order here.
            if dep.kind() == DepKind::Development && !dep.specified_req() {
                continue;
            };

            // Ignore self-references.
            if dep.source_id() == pkg.package_id().source_id() {
                continue;
            }

            if let Some(dep_pkg) = source_to_pkg.get(&dep.source_id()) {
                graph.link(pkg.package_id(), *dep_pkg);
            }
        }
    }

    LocalDependencies { packages, graph }
}
473
/// Determines the set of files to package for `pkg`, running metadata and
/// VCS dirty-state checks along the way.
#[tracing::instrument(skip_all)]
fn prepare_archive(
    ws: &Workspace<'_>,
    pkg: &Package,
    opts: &PackageOpts<'_>,
) -> CargoResult<Vec<ArchiveFile>> {
    let gctx = ws.gctx();
    let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), gctx);
    src.load()?;

    if opts.check_metadata {
        check_metadata(pkg, opts.reg_or_index.as_ref(), gctx)?;
    }

    // When both include and exclude are set, the exclude list is ignored;
    // warn so the user knows.
    if !pkg.manifest().exclude().is_empty() && !pkg.manifest().include().is_empty() {
        gctx.shell().warn(
            "both package.include and package.exclude are specified; \
             the exclude list will be ignored",
        )?;
    }
    let src_files = src.list_files(pkg)?;

    // Inspect the VCS state and capture info for `.cargo_vcs_info.json`.
    let vcs_info = vcs::check_repo_state(pkg, &src_files, ws, &opts)?;
    build_ar_list(ws, pkg, src_files, vcs_info, opts.include_lockfile)
}
501
/// Builds the final list of archive entries: the on-disk source files plus
/// generated entries (normalized `Cargo.toml`, the original manifest as
/// `Cargo.toml.orig`, optionally `Cargo.lock` and the VCS info file), with
/// `license-file`/`readme` pulled in even when they live outside the root.
#[tracing::instrument(skip_all)]
fn build_ar_list(
    ws: &Workspace<'_>,
    pkg: &Package,
    src_files: Vec<PathEntry>,
    vcs_info: Option<vcs::VcsInfo>,
    include_lockfile: bool,
) -> CargoResult<Vec<ArchiveFile>> {
    // Keyed case-insensitively so the `Cargo.toml` handling below also works
    // for differently-cased manifests.
    let mut result = HashMap::new();
    let root = pkg.root();
    for src_file in &src_files {
        let rel_path = src_file.strip_prefix(&root)?;
        check_filename(rel_path, &mut ws.gctx().shell())?;
        let rel_str = rel_path.to_str().ok_or_else(|| {
            anyhow::format_err!("non-utf8 path in source directory: {}", rel_path.display())
        })?;
        match rel_str {
            // `Cargo.lock` is emitted as a generated entry below instead.
            "Cargo.lock" => continue,
            // These names are generated by cargo; a source file with either
            // name would be clobbered.
            VCS_INFO_FILE | ORIGINAL_MANIFEST_FILE => anyhow::bail!(
                "invalid inclusion of reserved file name {} in package source",
                rel_str
            ),
            _ => {
                result
                    .entry(UncasedAscii::new(rel_str))
                    .or_insert_with(Vec::new)
                    .push(ArchiveFile {
                        rel_path: rel_path.to_owned(),
                        rel_str: rel_str.to_owned(),
                        contents: FileContents::OnDisk(src_file.to_path_buf()),
                    });
            }
        }
    }

    // Replace the on-disk `Cargo.toml` with a generated normalized manifest,
    // preserving the original as `Cargo.toml.orig`.
    if result.remove(&UncasedAscii::new("Cargo.toml")).is_some() {
        result
            .entry(UncasedAscii::new(ORIGINAL_MANIFEST_FILE))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(ORIGINAL_MANIFEST_FILE),
                rel_str: ORIGINAL_MANIFEST_FILE.to_string(),
                contents: FileContents::OnDisk(pkg.manifest_path().to_owned()),
            });
        result
            .entry(UncasedAscii::new("Cargo.toml"))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from("Cargo.toml"),
                rel_str: "Cargo.toml".to_string(),
                contents: FileContents::Generated(GeneratedFile::Manifest(
                    pkg.manifest_path().to_owned(),
                )),
            });
    } else {
        ws.gctx().shell().warn(&format!(
            "no `Cargo.toml` file found when packaging `{}` (note the case of the file name).",
            pkg.name()
        ))?;
    }

    if include_lockfile {
        // `Some(path)` only when the workspace actually has a lockfile.
        let lockfile_path = ws.lock_root().as_path_unlocked().join(LOCKFILE_NAME);
        let lockfile_path = lockfile_path.exists().then_some(lockfile_path);
        let rel_str = "Cargo.lock";
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::Lockfile(lockfile_path)),
            });
    }

    if let Some(vcs_info) = vcs_info {
        let rel_str = VCS_INFO_FILE;
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::VcsInfo(vcs_info)),
            });
    }

    // Accumulated errors for nonexistent `license-file`/`readme` paths.
    let mut invalid_manifest_field: Vec<String> = vec![];

    let mut result = result.into_values().flatten().collect();
    if let Some(license_file) = &pkg.manifest().metadata().license_file {
        let license_path = Path::new(license_file);
        let abs_file_path = paths::normalize_path(&pkg.root().join(license_path));
        if abs_file_path.is_file() {
            check_for_file_and_add(
                "license-file",
                license_path,
                abs_file_path,
                pkg,
                &mut result,
                ws,
            )?;
        } else {
            error_on_nonexistent_file(
                &pkg,
                &license_path,
                "license-file",
                &mut invalid_manifest_field,
            );
        }
    }
    if let Some(readme) = &pkg.manifest().metadata().readme {
        let readme_path = Path::new(readme);
        let abs_file_path = paths::normalize_path(&pkg.root().join(readme_path));
        if abs_file_path.is_file() {
            check_for_file_and_add("readme", readme_path, abs_file_path, pkg, &mut result, ws)?;
        } else {
            error_on_nonexistent_file(&pkg, &readme_path, "readme", &mut invalid_manifest_field);
        }
    }

    if !invalid_manifest_field.is_empty() {
        return Err(anyhow::anyhow!(invalid_manifest_field.join("\n")));
    }

    // Build scripts must live inside the package root or the packaged crate
    // cannot build.
    for t in pkg
        .manifest()
        .targets()
        .iter()
        .filter(|t| t.is_custom_build())
    {
        if let Some(custom_build_path) = t.src_path().path() {
            let abs_custom_build_path = paths::normalize_path(&pkg.root().join(custom_build_path));
            if !abs_custom_build_path.is_file() || !abs_custom_build_path.starts_with(pkg.root()) {
                error_custom_build_file_not_in_package(pkg, &abs_custom_build_path, t)?;
            }
        }
    }

    // Deterministic ordering of archive entries.
    result.sort_unstable_by(|a, b| a.rel_path.cmp(&b.rel_path));

    Ok(result)
}
648
/// Ensures the `license-file`/`readme` (named by `label`) at `abs_file_path`
/// ends up in `result`.
///
/// A file inside the package root is added at its relative path (if not
/// already listed); a file outside the root is archived at the package root
/// under its bare file name, with a warning if that name collides.
fn check_for_file_and_add(
    label: &str,
    file_path: &Path,
    abs_file_path: PathBuf,
    pkg: &Package,
    result: &mut Vec<ArchiveFile>,
    ws: &Workspace<'_>,
) -> CargoResult<()> {
    match abs_file_path.strip_prefix(&pkg.root()) {
        Ok(rel_file_path) => {
            // Inside the package: add only if the normal file listing did not
            // already pick it up.
            if !result.iter().any(|ar| ar.rel_path == rel_file_path) {
                result.push(ArchiveFile {
                    rel_path: rel_file_path.to_path_buf(),
                    rel_str: rel_file_path
                        .to_str()
                        .expect("everything was utf8")
                        .to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
        Err(_) => {
            // Outside the package root: use the bare file name.
            let file_name = file_path.file_name().unwrap();
            if result.iter().any(|ar| ar.rel_path == file_name) {
                // A root file with the same name already exists; it wins.
                ws.gctx().shell().warn(&format!(
                    "{} `{}` appears to be a path outside of the package, \
                    but there is already a file named `{}` in the root of the package. \
                    The archived crate will contain the copy in the root of the package. \
                    Update the {} to point to the path relative \
                    to the root of the package to remove this warning.",
                    label,
                    file_path.display(),
                    file_name.to_str().unwrap(),
                    label,
                ))?;
            } else {
                result.push(ArchiveFile {
                    rel_path: PathBuf::from(file_name),
                    rel_str: file_name.to_str().unwrap().to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
    }
    Ok(())
}
696
697fn error_on_nonexistent_file(
698 pkg: &Package,
699 path: &Path,
700 manifest_key_name: &'static str,
701 invalid: &mut Vec<String>,
702) {
703 let rel_msg = if path.is_absolute() {
704 "".to_string()
705 } else {
706 format!(" (relative to `{}`)", pkg.root().display())
707 };
708
709 let msg = format!(
710 "{manifest_key_name} `{}` does not appear to exist{}.\n\
711 Please update the {manifest_key_name} setting in the manifest at `{}`.",
712 path.display(),
713 rel_msg,
714 pkg.manifest_path().display()
715 );
716
717 invalid.push(msg);
718}
719
720fn error_custom_build_file_not_in_package(
721 pkg: &Package,
722 path: &Path,
723 target: &Target,
724) -> CargoResult<Vec<ArchiveFile>> {
725 let tip = {
726 let description_name = target.description_named();
727 if path.is_file() {
728 format!(
729 "the source file of {description_name} doesn't appear to be a path inside of the package.\n\
730 It is at `{}`, whereas the root the package is `{}`.\n",
731 path.display(),
732 pkg.root().display()
733 )
734 } else {
735 format!("the source file of {description_name} doesn't appear to exist.\n",)
736 }
737 };
738 let msg = format!(
739 "{}\
740 This may cause issue during packaging, as modules resolution and resources included via macros are often relative to the path of source files.\n\
741 Please update the `build` setting in the manifest at `{}` and point to a path inside the root of the package.",
742 tip,
743 pkg.manifest_path().display()
744 );
745 anyhow::bail!(msg)
746}
747
/// Generates the `Cargo.lock` contents to ship inside the packaged tarball,
/// resolving the publish-ready `publish_pkg` in an ephemeral workspace that
/// is optionally overlaid with the temporary registry of freshly packaged
/// local dependencies.
fn build_lock(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'_>,
    publish_pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
) -> CargoResult<String> {
    let gctx = ws.gctx();
    let mut orig_resolve = ops::load_pkg_lockfile(ws)?;

    // Resolve in an ephemeral workspace containing just the publish package.
    let mut tmp_ws = Workspace::ephemeral(publish_pkg.clone(), ws.gctx(), None, true)?;

    if let Some(local_reg) = local_reg {
        // Overlay the temporary registry so just-packaged local dependencies
        // are resolvable.
        tmp_ws.add_local_overlay(
            local_reg.upstream,
            local_reg.root.as_path_unlocked().to_owned(),
        );
        if opts.dry_run {
            // On a dry run nothing reaches the upstream registry, so record
            // the locally computed checksums in the previous resolve (under
            // the upstream source id) to keep them consistent with the
            // overlay's tarballs.
            if let Some(orig_resolve) = orig_resolve.as_mut() {
                let upstream_in_lock = if local_reg.upstream.is_crates_io() {
                    SourceId::crates_io(gctx)?
                } else {
                    local_reg.upstream
                };
                for (p, s) in local_reg.checksums() {
                    orig_resolve.set_checksum(p.with_source_id(upstream_in_lock), s.to_owned());
                }
            }
        }
    }
    let mut tmp_reg = tmp_ws.package_registry()?;

    let mut new_resolve = ops::resolve_with_previous(
        &mut tmp_reg,
        &tmp_ws,
        &CliFeatures::new_all(true),
        HasDevUnits::Yes,
        orig_resolve.as_ref(),
        None,
        &[],
        true,
    )?;

    let pkg_set = ops::get_resolved_packages(&new_resolve, tmp_reg)?;

    // Surface differences from the workspace lockfile (verbose only) and
    // warn about yanked dependencies.
    if let Some(orig_resolve) = orig_resolve {
        compare_resolve(gctx, tmp_ws.current()?, &orig_resolve, &new_resolve)?;
    }
    check_yanked(
        gctx,
        &pkg_set,
        &new_resolve,
        "consider updating to a version that is not yanked",
    )?;

    ops::resolve_to_string(&tmp_ws, &mut new_resolve)
}
808
/// Warns when commonly-expected manifest metadata is missing: description,
/// license (or license-file), and documentation/homepage/repository.
/// The warning is only issued for crates.io or an unspecified registry.
fn check_metadata(
    pkg: &Package,
    reg_or_index: Option<&RegistryOrIndex>,
    gctx: &GlobalContext,
) -> CargoResult<()> {
    let md = pkg.manifest().metadata();

    let mut missing = vec![];

    // `lacking!(a || b)` records all listed fields when *every* one of them
    // is absent or empty.
    macro_rules! lacking {
        ($( $($field: ident)||* ),*) => {{
            $(
                if $(md.$field.as_ref().map_or(true, |s| s.is_empty()))&&* {
                    $(missing.push(stringify!($field).replace("_", "-"));)*
                }
            )*
        }}
    }
    lacking!(
        description,
        license || license_file,
        documentation || homepage || repository
    );

    if !missing.is_empty() {
        // Custom registries may not require this metadata; warn only for
        // crates.io or when no registry was specified, never for a raw index.
        let should_warn = match reg_or_index {
            Some(RegistryOrIndex::Registry(reg_name)) => reg_name == CRATES_IO_REGISTRY,
            None => true, Some(RegistryOrIndex::Index(_)) => false, };

        if should_warn {
            // Join the field names as "a, b or c".
            let mut things = missing[..missing.len() - 1].join(", ");
            if !things.is_empty() {
                things.push_str(" or ");
            }
            things.push_str(missing.last().unwrap());

            gctx.shell().print_report(&[
                Level::WARNING.secondary_title(format!("manifest has no {things}"))
                .element(Level::NOTE.message("see https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info"))
            ],
            false
            )?
        }
    }

    Ok(())
}
863
864fn tar(
868 ws: &Workspace<'_>,
869 opts: &PackageOpts<'_>,
870 pkg: &Package,
871 local_reg: Option<&TmpRegistry<'_>>,
872 ar_files: Vec<ArchiveFile>,
873 dst: &File,
874 filename: &str,
875) -> CargoResult<u64> {
876 let filename = Path::new(filename);
878 let encoder = GzBuilder::new()
879 .filename(paths::path2bytes(filename)?)
880 .write(dst, Compression::best());
881
882 let mut ar = Builder::new(encoder);
884 ar.sparse(false);
885 let gctx = ws.gctx();
886
887 let base_name = format!("{}-{}", pkg.name(), pkg.version());
888 let base_path = Path::new(&base_name);
889 let included = ar_files
890 .iter()
891 .map(|ar_file| ar_file.rel_path.clone())
892 .collect::<Vec<_>>();
893 let publish_pkg = prepare_for_publish(pkg, ws, Some(&included))?;
894
895 let mut uncompressed_size = 0;
896 for ar_file in ar_files {
897 let ArchiveFile {
898 rel_path,
899 rel_str,
900 contents,
901 } = ar_file;
902 let ar_path = base_path.join(&rel_path);
903 gctx.shell()
904 .verbose(|shell| shell.status("Archiving", &rel_str))?;
905 let mut header = Header::new_gnu();
906 match contents {
907 FileContents::OnDisk(disk_path) => {
908 let mut file = File::open(&disk_path).with_context(|| {
909 format!("failed to open for archiving: `{}`", disk_path.display())
910 })?;
911 let metadata = file.metadata().with_context(|| {
912 format!("could not learn metadata for: `{}`", disk_path.display())
913 })?;
914 header.set_metadata_in_mode(&metadata, HeaderMode::Deterministic);
915 header.set_cksum();
916 ar.append_data(&mut header, &ar_path, &mut file)
917 .with_context(|| {
918 format!("could not archive source file `{}`", disk_path.display())
919 })?;
920 uncompressed_size += metadata.len() as u64;
921 }
922 FileContents::Generated(generated_kind) => {
923 let contents = match generated_kind {
924 GeneratedFile::Manifest(_) => {
925 publish_pkg.manifest().to_normalized_contents()?
926 }
927 GeneratedFile::Lockfile(_) => build_lock(ws, opts, &publish_pkg, local_reg)?,
928 GeneratedFile::VcsInfo(ref s) => serde_json::to_string_pretty(s)?,
929 };
930 header.set_entry_type(EntryType::file());
931 header.set_mode(0o644);
932 header.set_size(contents.len() as u64);
933 header.set_mtime(1153704088);
939 header.set_cksum();
940 ar.append_data(&mut header, &ar_path, contents.as_bytes())
941 .with_context(|| format!("could not archive source file `{}`", rel_str))?;
942 uncompressed_size += contents.len() as u64;
943 }
944 }
945 }
946
947 let encoder = ar.into_inner()?;
948 encoder.finish()?;
949 Ok(uncompressed_size)
950}
951
/// In verbose mode, notes every package that appears in the newly generated
/// lockfile but not in the original workspace lockfile, with a hint about
/// the version or source it replaced where one can be determined.
fn compare_resolve(
    gctx: &GlobalContext,
    current_pkg: &Package,
    orig_resolve: &Resolve,
    new_resolve: &Resolve,
) -> CargoResult<()> {
    if gctx.shell().verbosity() != Verbosity::Verbose {
        return Ok(());
    }
    let new_set: BTreeSet<PackageId> = new_resolve.iter().collect();
    let orig_set: BTreeSet<PackageId> = orig_resolve.iter().collect();
    let added = new_set.difference(&orig_set);
    let removed: Vec<&PackageId> = orig_set.difference(&new_set).collect();
    for pkg_id in added {
        // Skip the package currently being packaged.
        if pkg_id.name() == current_pkg.name() && pkg_id.version() == current_pkg.version() {
            continue;
        }
        // Removed entries with the same name AND version: the source changed.
        let removed_candidates: Vec<&PackageId> = removed
            .iter()
            .filter(|orig_pkg_id| {
                orig_pkg_id.name() == pkg_id.name() && orig_pkg_id.version() == pkg_id.version()
            })
            .cloned()
            .collect();
        let extra = match removed_candidates.len() {
            0 => {
                // No same-version match: report which version(s) it replaced.
                let previous_versions: Vec<&PackageId> = removed
                    .iter()
                    .filter(|orig_pkg_id| orig_pkg_id.name() == pkg_id.name())
                    .cloned()
                    .collect();
                match previous_versions.len() {
                    0 => String::new(),
                    1 => format!(
                        ", previous version was `{}`",
                        previous_versions[0].version()
                    ),
                    _ => format!(
                        ", previous versions were: {}",
                        previous_versions
                            .iter()
                            .map(|pkg_id| format!("`{}`", pkg_id.version()))
                            .collect::<Vec<_>>()
                            .join(", ")
                    ),
                }
            }
            1 => {
                format!(
                    ", was originally sourced from `{}`",
                    removed_candidates[0].source_id()
                )
            }
            _ => {
                let comma_list = removed_candidates
                    .iter()
                    .map(|pkg_id| format!("`{}`", pkg_id.source_id()))
                    .collect::<Vec<_>>()
                    .join(", ");
                format!(
                    ", was originally sourced from one of these sources: {}",
                    comma_list
                )
            }
        };
        let msg = format!(
            "package `{}` added to the packaged Cargo.lock file{}",
            pkg_id, extra
        );
        gctx.shell().note(msg)?;
    }
    Ok(())
}
1038
/// Checks every package in `resolve` against its registry and warns about
/// any that have been yanked, appending `hint` to each warning.
pub fn check_yanked(
    gctx: &GlobalContext,
    pkg_set: &PackageSet<'_>,
    resolve: &Resolve,
    hint: &str,
) -> CargoResult<()> {
    // Querying sources requires holding the package cache lock.
    let _lock = gctx.acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?;

    let mut sources = pkg_set.sources_mut();
    let mut pending: Vec<PackageId> = resolve.iter().collect();
    let mut results = Vec::new();
    // Drop cached data so yanked status is queried fresh.
    for (_id, source) in sources.sources_mut() {
        source.invalidate_cache();
    }
    // `is_yanked` can return `Pending`; keep polling, blocking the sources
    // between rounds, until every query has resolved.
    while !pending.is_empty() {
        pending.retain(|pkg_id| {
            if let Some(source) = sources.get_mut(pkg_id.source_id()) {
                match source.is_yanked(*pkg_id) {
                    Poll::Ready(result) => results.push((*pkg_id, result)),
                    Poll::Pending => return true,
                }
            }
            false
        });
        for (_id, source) in sources.sources_mut() {
            source.block_until_ready()?;
        }
    }

    for (pkg_id, is_yanked) in results {
        if is_yanked? {
            gctx.shell().warn(format!(
                "package `{}` in Cargo.lock is yanked in registry `{}`, {}",
                pkg_id,
                pkg_id.source_id().display_registry_name(),
                hint
            ))?;
        }
    }
    Ok(())
}
1082
1083fn check_filename(file: &Path, shell: &mut Shell) -> CargoResult<()> {
1090 let Some(name) = file.file_name() else {
1091 return Ok(());
1092 };
1093 let Some(name) = name.to_str() else {
1094 anyhow::bail!(
1095 "path does not have a unicode filename which may not unpack \
1096 on all platforms: {}",
1097 file.display()
1098 )
1099 };
1100 let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*'];
1101 if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) {
1102 anyhow::bail!(
1103 "cannot package a filename with a special character `{}`: {}",
1104 c,
1105 file.display()
1106 )
1107 }
1108 if restricted_names::is_windows_reserved_path(file) {
1109 shell.warn(format!(
1110 "file {} is a reserved Windows filename, \
1111 it will not work on Windows platforms",
1112 file.display()
1113 ))?;
1114 }
1115 Ok(())
1116}
1117
/// A temporary local registry used as an overlay over the upstream registry,
/// so freshly packaged crates can be resolved against before they are
/// actually published.
struct TmpRegistry<'a> {
    gctx: &'a GlobalContext,
    /// The registry this temporary one overlays.
    upstream: SourceId,
    /// Root directory holding the copied tarballs and the `index/` directory.
    root: Filesystem,
    /// sha256 hex checksums of the tarballs added so far, keyed by package id.
    checksums: HashMap<PackageId, String>,
    /// Held for the registry's lifetime to exclude concurrent users.
    _lock: FileLock,
}
1128
1129impl<'a> TmpRegistry<'a> {
1130 fn new(gctx: &'a GlobalContext, root: Filesystem, upstream: SourceId) -> CargoResult<Self> {
1131 root.create_dir()?;
1132 let _lock = root.open_rw_exclusive_create(".cargo-lock", gctx, "temporary registry")?;
1133 let slf = Self {
1134 gctx,
1135 root,
1136 upstream,
1137 checksums: HashMap::new(),
1138 _lock,
1139 };
1140 let index_path = slf.index_path().into_path_unlocked();
1142 if index_path.exists() {
1143 paths::remove_dir_all(index_path)?;
1144 }
1145 slf.index_path().create_dir()?;
1146 Ok(slf)
1147 }
1148
1149 fn index_path(&self) -> Filesystem {
1150 self.root.join("index")
1151 }
1152
1153 fn add_package(
1154 &mut self,
1155 ws: &Workspace<'_>,
1156 package: &Package,
1157 tar: &FileLock,
1158 ) -> CargoResult<()> {
1159 debug!(
1160 "adding package {}@{} to local overlay at {}",
1161 package.name(),
1162 package.version(),
1163 self.root.as_path_unlocked().display()
1164 );
1165 {
1166 let mut tar_copy = self.root.open_rw_exclusive_create(
1167 package.package_id().tarball_name(),
1168 self.gctx,
1169 "temporary package registry",
1170 )?;
1171 tar.file().seek(SeekFrom::Start(0))?;
1172 std::io::copy(&mut tar.file(), &mut tar_copy)?;
1173 tar_copy.flush()?;
1174 }
1175
1176 let new_crate = super::registry::prepare_transmit(self.gctx, ws, package, self.upstream)?;
1177
1178 tar.file().seek(SeekFrom::Start(0))?;
1179 let cksum = cargo_util::Sha256::new()
1180 .update_file(tar.file())?
1181 .finish_hex();
1182
1183 self.checksums.insert(package.package_id(), cksum.clone());
1184
1185 let deps: Vec<_> = new_crate
1186 .deps
1187 .into_iter()
1188 .map(|dep| {
1189 let name = dep
1190 .explicit_name_in_toml
1191 .clone()
1192 .unwrap_or_else(|| dep.name.clone())
1193 .into();
1194 let package = dep
1195 .explicit_name_in_toml
1196 .as_ref()
1197 .map(|_| dep.name.clone().into());
1198 RegistryDependency {
1199 name: name,
1200 req: dep.version_req.into(),
1201 features: dep.features.into_iter().map(|x| x.into()).collect(),
1202 optional: dep.optional,
1203 default_features: dep.default_features,
1204 target: dep.target.map(|x| x.into()),
1205 kind: Some(dep.kind.into()),
1206 registry: dep.registry.map(|x| x.into()),
1207 package: package,
1208 public: None,
1209 artifact: dep
1210 .artifact
1211 .map(|xs| xs.into_iter().map(|x| x.into()).collect()),
1212 bindep_target: dep.bindep_target.map(|x| x.into()),
1213 lib: dep.lib,
1214 }
1215 })
1216 .collect();
1217
1218 let index_line = serde_json::to_string(&IndexPackage {
1219 name: new_crate.name.into(),
1220 vers: package.version().clone(),
1221 deps,
1222 features: new_crate
1223 .features
1224 .into_iter()
1225 .map(|(k, v)| (k.into(), v.into_iter().map(|x| x.into()).collect()))
1226 .collect(),
1227 features2: None,
1228 cksum,
1229 yanked: None,
1230 links: new_crate.links.map(|x| x.into()),
1231 rust_version: None,
1232 pubtime: None,
1233 v: Some(2),
1234 })?;
1235
1236 let file =
1237 cargo_util::registry::make_dep_path(&package.name().as_str().to_lowercase(), false);
1238 let mut dst = self.index_path().open_rw_exclusive_create(
1239 file,
1240 self.gctx,
1241 "temporary package registry",
1242 )?;
1243 dst.write_all(index_line.as_bytes())?;
1244 Ok(())
1245 }
1246
1247 fn checksums(&self) -> impl Iterator<Item = (PackageId, &str)> {
1248 self.checksums.iter().map(|(p, s)| (*p, s.as_str()))
1249 }
1250}