use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::fs::{self, File};
use std::io::prelude::*;
use std::io::SeekFrom;
use std::path::{Path, PathBuf};
use std::task::Poll;

use crate::core::dependency::DepKind;
use crate::core::manifest::Target;
use crate::core::resolver::CliFeatures;
use crate::core::resolver::HasDevUnits;
use crate::core::PackageIdSpecQuery;
use crate::core::Shell;
use crate::core::Verbosity;
use crate::core::Workspace;
use crate::core::{Package, PackageId, PackageSet, Resolve, SourceId};
use crate::ops::lockfile::LOCKFILE_NAME;
use crate::ops::registry::{infer_registry, RegistryOrIndex};
use crate::sources::path::PathEntry;
use crate::sources::registry::index::{IndexPackage, RegistryDependency};
use crate::sources::{PathSource, CRATES_IO_REGISTRY};
use crate::util::cache_lock::CacheLockMode;
use crate::util::context::JobsConfig;
use crate::util::errors::CargoResult;
use crate::util::errors::ManifestError;
use crate::util::restricted_names;
use crate::util::toml::prepare_for_publish;
use crate::util::FileLock;
use crate::util::Filesystem;
use crate::util::GlobalContext;
use crate::util::Graph;
use crate::util::HumanBytes;
use crate::{drop_println, ops};
use anyhow::{bail, Context as _};
use cargo_util::paths;
use cargo_util_schemas::messages;
use flate2::{Compression, GzBuilder};
use tar::{Builder, EntryType, Header, HeaderMode};
use tracing::debug;
use unicase::Ascii as UncasedAscii;

mod vcs;
mod verify;

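/// Format in which the results of the package operation are reported.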
#[derive(Debug, Clone)]
pub enum PackageMessageFormat {
    Human,
    Json,
}

impl PackageMessageFormat {
    pub const POSSIBLE_VALUES: [&str; 2] = ["human", "json"];

    pub const DEFAULT: &str = "human";
}

impl std::str::FromStr for PackageMessageFormat {
    type Err = anyhow::Error;

    fn from_str(s: &str) -> Result<PackageMessageFormat, anyhow::Error> {
        match s {
            "human" => Ok(PackageMessageFormat::Human),
            "json" => Ok(PackageMessageFormat::Json),
            f => bail!("unknown message format `{f}`"),
        }
    }
}

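/// Options that control how packages are assembled by [`package`].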
#[derive(Clone)]
pub struct PackageOpts<'gctx> {
    pub gctx: &'gctx GlobalContext,
    pub list: bool,
    pub fmt: PackageMessageFormat,
    pub check_metadata: bool,
    pub allow_dirty: bool,
    pub include_lockfile: bool,
    pub verify: bool,
    pub jobs: Option<JobsConfig>,
    pub keep_going: bool,
    pub to_package: ops::Packages,
    pub targets: Vec<String>,
    pub cli_features: CliFeatures,
    pub reg_or_index: Option<ops::RegistryOrIndex>,
}

const ORIGINAL_MANIFEST_FILE: &str = "Cargo.toml.orig";
const VCS_INFO_FILE: &str = ".cargo_vcs_info.json";

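/// A file to be added to the generated `.crate` archive.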
struct ArchiveFile {
    rel_path: PathBuf,
    rel_str: String,
    contents: FileContents,
}

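/// Where an [`ArchiveFile`]'s contents come from: copied from disk or generated by Cargo.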
enum FileContents {
    OnDisk(PathBuf),
    Generated(GeneratedFile),
}

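/// A file generated by Cargo as part of packaging, rather than copied from the package source.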
enum GeneratedFile {
    Manifest(PathBuf),
    Lockfile(Option<PathBuf>),
    VcsInfo(vcs::VcsInfo),
}

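/// Builds a tarball for `pkg` from the prepared `ar_files`, placing it in the
/// `package` directory under the target dir and returning a lock on the result.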
#[tracing::instrument(skip_all)]
fn create_package(
    ws: &Workspace<'_>,
    pkg: &Package,
    ar_files: Vec<ArchiveFile>,
    local_reg: Option<&TmpRegistry<'_>>,
) -> CargoResult<FileLock> {
    let gctx = ws.gctx();
    let filecount = ar_files.len();

    for dep in pkg.dependencies() {
        super::check_dep_has_version(dep, false).map_err(|err| {
            ManifestError::new(
                err.context(format!(
                    "failed to verify manifest at `{}`",
                    pkg.manifest_path().display()
                )),
                pkg.manifest_path().into(),
            )
        })?;
    }

    let filename = pkg.package_id().tarball_name();
    let dir = ws.target_dir().join("package");
    let mut dst = {
        let tmp = format!(".{}", filename);
        dir.open_rw_exclusive_create(&tmp, gctx, "package scratch space")?
    };

    gctx.shell()
        .status("Packaging", pkg.package_id().to_string())?;
    dst.file().set_len(0)?;
    let uncompressed_size = tar(ws, pkg, local_reg, ar_files, dst.file(), &filename)
        .context("failed to prepare local package for uploading")?;

    dst.seek(SeekFrom::Start(0))?;
    let src_path = dst.path();
    let dst_path = dst.parent().join(&filename);
    fs::rename(&src_path, &dst_path)
        .context("failed to move temporary tarball into final location")?;

    let dst_metadata = dst
        .file()
        .metadata()
        .with_context(|| format!("could not learn metadata for: `{}`", dst_path.display()))?;
    let compressed_size = dst_metadata.len();

    let uncompressed = HumanBytes(uncompressed_size);
    let compressed = HumanBytes(compressed_size);

    let message = format!("{filecount} files, {uncompressed:.1} ({compressed:.1} compressed)");
    drop(gctx.shell().status("Packaged", message));

    Ok(dst)
}

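/// Packages the requested workspace members into `.crate` files, returning a
/// lock on each generated tarball.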
pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Vec<FileLock>> {
    let specs = &opts.to_package.to_package_id_specs(ws)?;
    if let ops::Packages::Packages(_) = opts.to_package {
        for spec in specs.iter() {
            let member_ids = ws.members().map(|p| p.package_id());
            spec.query(member_ids)?;
        }
    }
    let mut pkgs = ws.members_with_features(specs, &opts.cli_features)?;

    pkgs.retain(|(pkg, _feats)| specs.iter().any(|spec| spec.matches(pkg.package_id())));

    Ok(do_package(ws, opts, pkgs)?
        .into_iter()
        .map(|x| x.2)
        .collect())
}

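/// Like [`package`], but also returns the graph of local dependencies between
/// the packaged packages, with each package's `CliFeatures` and tarball attached.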
pub(crate) fn package_with_dep_graph(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'_>,
    pkgs: Vec<(&Package, CliFeatures)>,
) -> CargoResult<LocalDependencies<(CliFeatures, FileLock)>> {
    let output = do_package(ws, opts, pkgs)?;

    Ok(local_deps(output.into_iter().map(
        |(pkg, opts, tarball)| (pkg, (opts.cli_features, tarball)),
    )))
}

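/// Packages the given packages in local-dependency order: resolves the
/// workspace when a lockfile is to be included, sets up a temporary registry
/// overlay for local dependencies when needed, archives (or just lists) each
/// package, and finally verifies the generated tarballs if requested.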
fn do_package<'a>(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'a>,
    pkgs: Vec<(&Package, CliFeatures)>,
) -> CargoResult<Vec<(Package, PackageOpts<'a>, FileLock)>> {
    if ws
        .lock_root()
        .as_path_unlocked()
        .join(LOCKFILE_NAME)
        .exists()
        && opts.include_lockfile
    {
        let dry_run = false;
        let _ = ops::resolve_ws(ws, dry_run)?;
    }

    let deps = local_deps(pkgs.iter().map(|(p, f)| ((*p).clone(), f.clone())));
    let just_pkgs: Vec<_> = pkgs.iter().map(|p| p.0).collect();

    let mut local_reg = if ws.gctx().cli_unstable().package_workspace {
        let sid = if (deps.has_dependencies() && (opts.include_lockfile || opts.verify))
            || opts.reg_or_index.is_some()
        {
            let sid = get_registry(ws.gctx(), &just_pkgs, opts.reg_or_index.clone())?;
            debug!("packaging for registry {}", sid);
            Some(sid)
        } else {
            None
        };
        let reg_dir = ws.build_dir().join("package").join("tmp-registry");
        sid.map(|sid| TmpRegistry::new(ws.gctx(), reg_dir, sid))
            .transpose()?
    } else {
        None
    };

    let sorted_pkgs = deps.sort();
    let mut outputs: Vec<(Package, PackageOpts<'_>, FileLock)> = Vec::new();
    for (pkg, cli_features) in sorted_pkgs {
        let opts = PackageOpts {
            cli_features: cli_features.clone(),
            to_package: ops::Packages::Default,
            ..opts.clone()
        };
        let ar_files = prepare_archive(ws, &pkg, &opts)?;

        if opts.list {
            match opts.fmt {
                PackageMessageFormat::Human => {
                    for ar_file in &ar_files {
                        drop_println!(ws.gctx(), "{}", ar_file.rel_str);
                    }
                }
                PackageMessageFormat::Json => {
                    let message = messages::PackageList {
                        id: pkg.package_id().to_spec(),
                        files: BTreeMap::from_iter(ar_files.into_iter().map(|f| {
                            let file = match f.contents {
                                FileContents::OnDisk(path) => messages::PackageFile::Copy { path },
                                FileContents::Generated(
                                    GeneratedFile::Manifest(path)
                                    | GeneratedFile::Lockfile(Some(path)),
                                ) => messages::PackageFile::Generate { path: Some(path) },
                                FileContents::Generated(
                                    GeneratedFile::VcsInfo(_) | GeneratedFile::Lockfile(None),
                                ) => messages::PackageFile::Generate { path: None },
                            };
                            (f.rel_path, file)
                        })),
                    };
                    let _ = ws.gctx().shell().print_json(&message);
                }
            }
        } else {
            let tarball = create_package(ws, &pkg, ar_files, local_reg.as_ref())?;
            if let Some(local_reg) = local_reg.as_mut() {
                if pkg.publish() != &Some(Vec::new()) {
                    local_reg.add_package(ws, &pkg, &tarball)?;
                }
            }
            outputs.push((pkg, opts, tarball));
        }
    }

    if opts.verify {
        for (pkg, opts, tarball) in &outputs {
            verify::run_verify(ws, pkg, tarball, local_reg.as_ref(), opts)
                .context("failed to verify package tarball")?
        }
    }

    Ok(outputs)
}

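/// Determines the registry that the packages are destined for, erroring if a
/// package's `package.publish` list does not allow that registry.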
fn get_registry(
    gctx: &GlobalContext,
    pkgs: &[&Package],
    reg_or_index: Option<RegistryOrIndex>,
) -> CargoResult<SourceId> {
    let reg_or_index = match reg_or_index.clone() {
        Some(r) => Some(r),
        None => infer_registry(pkgs)?,
    };

    let reg = reg_or_index
        .clone()
        .unwrap_or_else(|| RegistryOrIndex::Registry(CRATES_IO_REGISTRY.to_owned()));
    if let RegistryOrIndex::Registry(reg_name) = reg {
        for pkg in pkgs {
            if let Some(allowed) = pkg.publish().as_ref() {
                if !allowed.is_empty() && !allowed.iter().any(|a| a == &reg_name) {
                    bail!(
                        "`{}` cannot be packaged.\n\
                         The registry `{}` is not listed in the `package.publish` value in Cargo.toml.",
                        pkg.name(),
                        reg_name
                    );
                }
            }
        }
    }
    Ok(ops::registry::get_source_id(gctx, reg_or_index.as_ref())?.replacement)
}

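/// The subgraph of the dependency graph spanning the packages being packaged,
/// with an arbitrary payload `T` attached to each package.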
#[derive(Clone, Debug, Default)]
pub(crate) struct LocalDependencies<T> {
    pub packages: HashMap<PackageId, (Package, T)>,
    pub graph: Graph<PackageId, ()>,
}

impl<T: Clone> LocalDependencies<T> {
    pub fn sort(&self) -> Vec<(Package, T)> {
        self.graph
            .sort()
            .into_iter()
            .map(|name| self.packages[&name].clone())
            .collect()
    }

    pub fn has_dependencies(&self) -> bool {
        self.graph
            .iter()
            .any(|node| self.graph.edges(node).next().is_some())
    }
}

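/// Builds the graph of path dependencies between the given packages, ignoring
/// dev-dependencies that have no version requirement.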
fn local_deps<T>(packages: impl Iterator<Item = (Package, T)>) -> LocalDependencies<T> {
    let packages: HashMap<PackageId, (Package, T)> = packages
        .map(|(pkg, payload)| (pkg.package_id(), (pkg, payload)))
        .collect();

    let source_to_pkg: HashMap<_, _> = packages
        .keys()
        .map(|pkg_id| (pkg_id.source_id(), *pkg_id))
        .collect();

    let mut graph = Graph::new();
    for (pkg, _payload) in packages.values() {
        graph.add(pkg.package_id());
        for dep in pkg.dependencies() {
            if !dep.source_id().is_path() {
                continue;
            }

            if dep.kind() == DepKind::Development && !dep.specified_req() {
                continue;
            }

            if dep.source_id() == pkg.package_id().source_id() {
                continue;
            }

            if let Some(dep_pkg) = source_to_pkg.get(&dep.source_id()) {
                graph.link(pkg.package_id(), *dep_pkg);
            }
        }
    }

    LocalDependencies { packages, graph }
}

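/// Performs pre-archiving checks and builds the list of files to include in
/// the archive.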
#[tracing::instrument(skip_all)]
fn prepare_archive(
    ws: &Workspace<'_>,
    pkg: &Package,
    opts: &PackageOpts<'_>,
) -> CargoResult<Vec<ArchiveFile>> {
    let gctx = ws.gctx();
    let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), gctx);
    src.load()?;

    if opts.check_metadata {
        check_metadata(pkg, gctx)?;
    }

    if !pkg.manifest().exclude().is_empty() && !pkg.manifest().include().is_empty() {
        gctx.shell().warn(
            "both package.include and package.exclude are specified; \
             the exclude list will be ignored",
        )?;
    }
    let src_files = src.list_files(pkg)?;

    let vcs_info = vcs::check_repo_state(pkg, &src_files, ws, &opts)?;

    build_ar_list(ws, pkg, src_files, vcs_info, opts.include_lockfile)
}

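/// Builds the list of files to archive: copies source files, substitutes the
/// generated `Cargo.toml`, `Cargo.lock`, and VCS info files, and validates the
/// `license-file`, `readme`, and custom build script paths.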
#[tracing::instrument(skip_all)]
fn build_ar_list(
    ws: &Workspace<'_>,
    pkg: &Package,
    src_files: Vec<PathEntry>,
    vcs_info: Option<vcs::VcsInfo>,
    include_lockfile: bool,
) -> CargoResult<Vec<ArchiveFile>> {
    let mut result = HashMap::new();
    let root = pkg.root();
    for src_file in &src_files {
        let rel_path = src_file.strip_prefix(&root)?;
        check_filename(rel_path, &mut ws.gctx().shell())?;
        let rel_str = rel_path.to_str().ok_or_else(|| {
            anyhow::format_err!("non-utf8 path in source directory: {}", rel_path.display())
        })?;
        match rel_str {
            "Cargo.lock" => continue,
            VCS_INFO_FILE | ORIGINAL_MANIFEST_FILE => anyhow::bail!(
                "invalid inclusion of reserved file name {} in package source",
                rel_str
            ),
            _ => {
                result
                    .entry(UncasedAscii::new(rel_str))
                    .or_insert_with(Vec::new)
                    .push(ArchiveFile {
                        rel_path: rel_path.to_owned(),
                        rel_str: rel_str.to_owned(),
                        contents: FileContents::OnDisk(src_file.to_path_buf()),
                    });
            }
        }
    }

    if result.remove(&UncasedAscii::new("Cargo.toml")).is_some() {
        result
            .entry(UncasedAscii::new(ORIGINAL_MANIFEST_FILE))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(ORIGINAL_MANIFEST_FILE),
                rel_str: ORIGINAL_MANIFEST_FILE.to_string(),
                contents: FileContents::OnDisk(pkg.manifest_path().to_owned()),
            });
        result
            .entry(UncasedAscii::new("Cargo.toml"))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from("Cargo.toml"),
                rel_str: "Cargo.toml".to_string(),
                contents: FileContents::Generated(GeneratedFile::Manifest(
                    pkg.manifest_path().to_owned(),
                )),
            });
    } else {
        ws.gctx().shell().warn(&format!(
            "no `Cargo.toml` file found when packaging `{}` (note the case of the file name).",
            pkg.name()
        ))?;
    }

    if include_lockfile {
        let lockfile_path = ws.lock_root().as_path_unlocked().join(LOCKFILE_NAME);
        let lockfile_path = lockfile_path.exists().then_some(lockfile_path);
        let rel_str = "Cargo.lock";
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::Lockfile(lockfile_path)),
            });
    }

    if let Some(vcs_info) = vcs_info {
        let rel_str = VCS_INFO_FILE;
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::VcsInfo(vcs_info)),
            });
    }

    let mut invalid_manifest_field: Vec<String> = vec![];

    let mut result = result.into_values().flatten().collect();
    if let Some(license_file) = &pkg.manifest().metadata().license_file {
        let license_path = Path::new(license_file);
        let abs_file_path = paths::normalize_path(&pkg.root().join(license_path));
        if abs_file_path.is_file() {
            check_for_file_and_add(
                "license-file",
                license_path,
                abs_file_path,
                pkg,
                &mut result,
                ws,
            )?;
        } else {
            error_on_nonexistent_file(
                &pkg,
                &license_path,
                "license-file",
                &mut invalid_manifest_field,
            );
        }
    }
    if let Some(readme) = &pkg.manifest().metadata().readme {
        let readme_path = Path::new(readme);
        let abs_file_path = paths::normalize_path(&pkg.root().join(readme_path));
        if abs_file_path.is_file() {
            check_for_file_and_add("readme", readme_path, abs_file_path, pkg, &mut result, ws)?;
        } else {
            error_on_nonexistent_file(&pkg, &readme_path, "readme", &mut invalid_manifest_field);
        }
    }

    if !invalid_manifest_field.is_empty() {
        return Err(anyhow::anyhow!(invalid_manifest_field.join("\n")));
    }

    for t in pkg
        .manifest()
        .targets()
        .iter()
        .filter(|t| t.is_custom_build())
    {
        if let Some(custom_build_path) = t.src_path().path() {
            let abs_custom_build_path =
                paths::normalize_path(&pkg.root().join(custom_build_path));
            if !abs_custom_build_path.is_file() || !abs_custom_build_path.starts_with(pkg.root())
            {
                error_custom_build_file_not_in_package(pkg, &abs_custom_build_path, t)?;
            }
        }
    }

    result.sort_unstable_by(|a, b| a.rel_path.cmp(&b.rel_path));

    Ok(result)
}

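/// Adds the `license-file` or `readme` to the archive list. A file that lives
/// outside the package root is copied into the root of the archive, with a
/// warning if a file of the same name already exists there.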
fn check_for_file_and_add(
    label: &str,
    file_path: &Path,
    abs_file_path: PathBuf,
    pkg: &Package,
    result: &mut Vec<ArchiveFile>,
    ws: &Workspace<'_>,
) -> CargoResult<()> {
    match abs_file_path.strip_prefix(&pkg.root()) {
        Ok(rel_file_path) => {
            if !result.iter().any(|ar| ar.rel_path == rel_file_path) {
                result.push(ArchiveFile {
                    rel_path: rel_file_path.to_path_buf(),
                    rel_str: rel_file_path
                        .to_str()
                        .expect("everything was utf8")
                        .to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
        Err(_) => {
            let file_name = file_path.file_name().unwrap();
            if result.iter().any(|ar| ar.rel_path == file_name) {
                ws.gctx().shell().warn(&format!(
                    "{} `{}` appears to be a path outside of the package, \
                     but there is already a file named `{}` in the root of the package. \
                     The archived crate will contain the copy in the root of the package. \
                     Update the {} to point to the path relative \
                     to the root of the package to remove this warning.",
                    label,
                    file_path.display(),
                    file_name.to_str().unwrap(),
                    label,
                ))?;
            } else {
                result.push(ArchiveFile {
                    rel_path: PathBuf::from(file_name),
                    rel_str: file_name.to_str().unwrap().to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
    }
    Ok(())
}

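/// Records an error message for a manifest field that points at a file that
/// does not exist.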
fn error_on_nonexistent_file(
    pkg: &Package,
    path: &Path,
    manifest_key_name: &'static str,
    invalid: &mut Vec<String>,
) {
    let rel_msg = if path.is_absolute() {
        "".to_string()
    } else {
        format!(" (relative to `{}`)", pkg.root().display())
    };

    let msg = format!(
        "{manifest_key_name} `{}` does not appear to exist{}.\n\
         Please update the {manifest_key_name} setting in the manifest at `{}`.",
        path.display(),
        rel_msg,
        pkg.manifest_path().display()
    );

    invalid.push(msg);
}

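/// Errors when a custom build script is missing or lies outside the package root.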
fn error_custom_build_file_not_in_package(
    pkg: &Package,
    path: &Path,
    target: &Target,
) -> CargoResult<Vec<ArchiveFile>> {
    let tip = {
        let description_name = target.description_named();
        if path.is_file() {
            format!("the source file of {description_name} doesn't appear to be a path inside of the package.\n\
            It is at `{}`, whereas the root of the package is `{}`.\n",
            path.display(), pkg.root().display()
            )
        } else {
            format!("the source file of {description_name} doesn't appear to exist.\n",)
        }
    };
    let msg = format!(
        "{}\
        This may cause issues during packaging, as module resolution and resources included via macros are often relative to the path of source files.\n\
        Please update the `build` setting in the manifest at `{}` and point to a path inside the root of the package.",
        tip, pkg.manifest_path().display()
    );
    anyhow::bail!(msg)
}

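/// Constructs the `Cargo.lock` contents to ship with `publish_pkg`, re-resolving
/// against the temporary registry overlay (when one is in use) and checking the
/// new resolve for changed or yanked dependencies.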
fn build_lock(
    ws: &Workspace<'_>,
    publish_pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
) -> CargoResult<String> {
    let gctx = ws.gctx();
    let orig_resolve = ops::load_pkg_lockfile(ws)?;

    let mut tmp_ws = Workspace::ephemeral(publish_pkg.clone(), ws.gctx(), None, true)?;

    if let Some(local_reg) = local_reg {
        tmp_ws.add_local_overlay(
            local_reg.upstream,
            local_reg.root.as_path_unlocked().to_owned(),
        );
    }
    let mut tmp_reg = tmp_ws.package_registry()?;

    let mut new_resolve = ops::resolve_with_previous(
        &mut tmp_reg,
        &tmp_ws,
        &CliFeatures::new_all(true),
        HasDevUnits::Yes,
        orig_resolve.as_ref(),
        None,
        &[],
        true,
    )?;

    let pkg_set = ops::get_resolved_packages(&new_resolve, tmp_reg)?;

    if let Some(orig_resolve) = orig_resolve {
        compare_resolve(gctx, tmp_ws.current()?, &orig_resolve, &new_resolve)?;
    }
    check_yanked(
        gctx,
        &pkg_set,
        &new_resolve,
        "consider updating to a version that is not yanked",
    )?;

    ops::resolve_to_string(&tmp_ws, &mut new_resolve)
}

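/// Warns when the manifest is missing fields (description, license, etc.) that
/// are useful for published packages.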
fn check_metadata(pkg: &Package, gctx: &GlobalContext) -> CargoResult<()> {
    let md = pkg.manifest().metadata();

    let mut missing = vec![];

    macro_rules! lacking {
        ($( $($field: ident)||* ),*) => {{
            $(
                if $(md.$field.as_ref().map_or(true, |s| s.is_empty()))&&* {
                    $(missing.push(stringify!($field).replace("_", "-"));)*
                }
            )*
        }}
    }
    lacking!(
        description,
        license || license_file,
        documentation || homepage || repository
    );

    if !missing.is_empty() {
        let mut things = missing[..missing.len() - 1].join(", ");
        if !things.is_empty() {
            things.push_str(" or ");
        }
        things.push_str(missing.last().unwrap());

        gctx.shell().warn(&format!(
            "manifest has no {things}.\n\
             See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.",
            things = things
        ))?
    }

    Ok(())
}

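/// Compresses and writes the archive files into `dst` as a gzipped tarball,
/// returning the uncompressed size of the archived contents.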
fn tar(
    ws: &Workspace<'_>,
    pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
    ar_files: Vec<ArchiveFile>,
    dst: &File,
    filename: &str,
) -> CargoResult<u64> {
    let filename = Path::new(filename);
    let encoder = GzBuilder::new()
        .filename(paths::path2bytes(filename)?)
        .write(dst, Compression::best());

    let mut ar = Builder::new(encoder);
    ar.sparse(false);
    let gctx = ws.gctx();

    let base_name = format!("{}-{}", pkg.name(), pkg.version());
    let base_path = Path::new(&base_name);
    let included = ar_files
        .iter()
        .map(|ar_file| ar_file.rel_path.clone())
        .collect::<Vec<_>>();
    let publish_pkg = prepare_for_publish(pkg, ws, Some(&included))?;

    let mut uncompressed_size = 0;
    for ar_file in ar_files {
        let ArchiveFile {
            rel_path,
            rel_str,
            contents,
        } = ar_file;
        let ar_path = base_path.join(&rel_path);
        gctx.shell()
            .verbose(|shell| shell.status("Archiving", &rel_str))?;
        let mut header = Header::new_gnu();
        match contents {
            FileContents::OnDisk(disk_path) => {
                let mut file = File::open(&disk_path).with_context(|| {
                    format!("failed to open for archiving: `{}`", disk_path.display())
                })?;
                let metadata = file.metadata().with_context(|| {
                    format!("could not learn metadata for: `{}`", disk_path.display())
                })?;
                header.set_metadata_in_mode(&metadata, HeaderMode::Deterministic);
                header.set_cksum();
                ar.append_data(&mut header, &ar_path, &mut file)
                    .with_context(|| {
                        format!("could not archive source file `{}`", disk_path.display())
                    })?;
                uncompressed_size += metadata.len() as u64;
            }
            FileContents::Generated(generated_kind) => {
                let contents = match generated_kind {
                    GeneratedFile::Manifest(_) => {
                        publish_pkg.manifest().to_normalized_contents()?
                    }
                    GeneratedFile::Lockfile(_) => build_lock(ws, &publish_pkg, local_reg)?,
                    GeneratedFile::VcsInfo(ref s) => serde_json::to_string_pretty(s)?,
                };
                header.set_entry_type(EntryType::file());
                header.set_mode(0o644);
                header.set_size(contents.len() as u64);
                header.set_mtime(1);
                header.set_cksum();
                ar.append_data(&mut header, &ar_path, contents.as_bytes())
                    .with_context(|| format!("could not archive source file `{}`", rel_str))?;
                uncompressed_size += contents.len() as u64;
            }
        }
    }

    let encoder = ar.into_inner()?;
    encoder.finish()?;
    Ok(uncompressed_size)
}

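/// In verbose mode, notes any packages that appear in the packaged `Cargo.lock`
/// but not in the workspace's original lockfile.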
fn compare_resolve(
    gctx: &GlobalContext,
    current_pkg: &Package,
    orig_resolve: &Resolve,
    new_resolve: &Resolve,
) -> CargoResult<()> {
    if gctx.shell().verbosity() != Verbosity::Verbose {
        return Ok(());
    }
    let new_set: BTreeSet<PackageId> = new_resolve.iter().collect();
    let orig_set: BTreeSet<PackageId> = orig_resolve.iter().collect();
    let added = new_set.difference(&orig_set);
    let removed: Vec<&PackageId> = orig_set.difference(&new_set).collect();
    for pkg_id in added {
        if pkg_id.name() == current_pkg.name() && pkg_id.version() == current_pkg.version() {
            continue;
        }
        let removed_candidates: Vec<&PackageId> = removed
            .iter()
            .filter(|orig_pkg_id| {
                orig_pkg_id.name() == pkg_id.name() && orig_pkg_id.version() == pkg_id.version()
            })
            .cloned()
            .collect();
        let extra = match removed_candidates.len() {
            0 => {
                let previous_versions: Vec<&PackageId> = removed
                    .iter()
                    .filter(|orig_pkg_id| orig_pkg_id.name() == pkg_id.name())
                    .cloned()
                    .collect();
                match previous_versions.len() {
                    0 => String::new(),
                    1 => format!(
                        ", previous version was `{}`",
                        previous_versions[0].version()
                    ),
                    _ => format!(
                        ", previous versions were: {}",
                        previous_versions
                            .iter()
                            .map(|pkg_id| format!("`{}`", pkg_id.version()))
                            .collect::<Vec<_>>()
                            .join(", ")
                    ),
                }
            }
            1 => {
                format!(
                    ", was originally sourced from `{}`",
                    removed_candidates[0].source_id()
                )
            }
            _ => {
                let comma_list = removed_candidates
                    .iter()
                    .map(|pkg_id| format!("`{}`", pkg_id.source_id()))
                    .collect::<Vec<_>>()
                    .join(", ");
                format!(
                    ", was originally sourced from one of these sources: {}",
                    comma_list
                )
            }
        };
        let msg = format!(
            "package `{}` added to the packaged Cargo.lock file{}",
            pkg_id, extra
        );
        gctx.shell().note(msg)?;
    }
    Ok(())
}

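/// Warns about any resolved packages that have been yanked from their registry.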
pub fn check_yanked(
    gctx: &GlobalContext,
    pkg_set: &PackageSet<'_>,
    resolve: &Resolve,
    hint: &str,
) -> CargoResult<()> {
    let _lock = gctx.acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?;

    let mut sources = pkg_set.sources_mut();
    let mut pending: Vec<PackageId> = resolve.iter().collect();
    let mut results = Vec::new();
    for (_id, source) in sources.sources_mut() {
        source.invalidate_cache();
    }
    while !pending.is_empty() {
        pending.retain(|pkg_id| {
            if let Some(source) = sources.get_mut(pkg_id.source_id()) {
                match source.is_yanked(*pkg_id) {
                    Poll::Ready(result) => results.push((*pkg_id, result)),
                    Poll::Pending => return true,
                }
            }
            false
        });
        for (_id, source) in sources.sources_mut() {
            source.block_until_ready()?;
        }
    }

    for (pkg_id, is_yanked) in results {
        if is_yanked? {
            gctx.shell().warn(format!(
                "package `{}` in Cargo.lock is yanked in registry `{}`, {}",
                pkg_id,
                pkg_id.source_id().display_registry_name(),
                hint
            ))?;
        }
    }
    Ok(())
}

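/// Checks that a file name will unpack on all supported platforms: errors on
/// special characters and non-Unicode names, warns about reserved Windows names.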
fn check_filename(file: &Path, shell: &mut Shell) -> CargoResult<()> {
    let Some(name) = file.file_name() else {
        return Ok(());
    };
    let Some(name) = name.to_str() else {
        anyhow::bail!(
            "path does not have a unicode filename which may not unpack \
             on all platforms: {}",
            file.display()
        )
    };
    let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*'];
    if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) {
        anyhow::bail!(
            "cannot package a filename with a special character `{}`: {}",
            c,
            file.display()
        )
    }
    if restricted_names::is_windows_reserved_path(file) {
        shell.warn(format!(
            "file {} is a reserved Windows filename, \
             it will not work on Windows platforms",
            file.display()
        ))?;
    }
    Ok(())
}

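/// A temporary local registry, overlaid on the upstream registry, holding the
/// packages generated so far so that later packages can resolve their local
/// dependencies against them.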
struct TmpRegistry<'a> {
    gctx: &'a GlobalContext,
    upstream: SourceId,
    root: Filesystem,
    _lock: FileLock,
}

impl<'a> TmpRegistry<'a> {
    fn new(gctx: &'a GlobalContext, root: Filesystem, upstream: SourceId) -> CargoResult<Self> {
        root.create_dir()?;
        let _lock = root.open_rw_exclusive_create(".cargo-lock", gctx, "temporary registry")?;
        let slf = Self {
            gctx,
            root,
            upstream,
            _lock,
        };
        let index_path = slf.index_path().into_path_unlocked();
        if index_path.exists() {
            paths::remove_dir_all(index_path)?;
        }
        slf.index_path().create_dir()?;
        Ok(slf)
    }

    fn index_path(&self) -> Filesystem {
        self.root.join("index")
    }

    fn add_package(
        &mut self,
        ws: &Workspace<'_>,
        package: &Package,
        tar: &FileLock,
    ) -> CargoResult<()> {
        debug!(
            "adding package {}@{} to local overlay at {}",
            package.name(),
            package.version(),
            self.root.as_path_unlocked().display()
        );
        {
            let mut tar_copy = self.root.open_rw_exclusive_create(
                package.package_id().tarball_name(),
                self.gctx,
                "temporary package registry",
            )?;
            tar.file().seek(SeekFrom::Start(0))?;
            std::io::copy(&mut tar.file(), &mut tar_copy)?;
            tar_copy.flush()?;
        }

        let new_crate = super::registry::prepare_transmit(self.gctx, ws, package, self.upstream)?;

        tar.file().seek(SeekFrom::Start(0))?;
        let cksum = cargo_util::Sha256::new()
            .update_file(tar.file())?
            .finish_hex();

        let deps: Vec<_> = new_crate
            .deps
            .into_iter()
            .map(|dep| {
                let name = dep
                    .explicit_name_in_toml
                    .clone()
                    .unwrap_or_else(|| dep.name.clone())
                    .into();
                let package = dep
                    .explicit_name_in_toml
                    .as_ref()
                    .map(|_| dep.name.clone().into());
                RegistryDependency {
                    name,
                    req: dep.version_req.into(),
                    features: dep.features.into_iter().map(|x| x.into()).collect(),
                    optional: dep.optional,
                    default_features: dep.default_features,
                    target: dep.target.map(|x| x.into()),
                    kind: Some(dep.kind.into()),
                    registry: dep.registry.map(|x| x.into()),
                    package,
                    public: None,
                    artifact: dep
                        .artifact
                        .map(|xs| xs.into_iter().map(|x| x.into()).collect()),
                    bindep_target: dep.bindep_target.map(|x| x.into()),
                    lib: dep.lib,
                }
            })
            .collect();

        let index_line = serde_json::to_string(&IndexPackage {
            name: new_crate.name.into(),
            vers: package.version().clone(),
            deps,
            features: new_crate
                .features
                .into_iter()
                .map(|(k, v)| (k.into(), v.into_iter().map(|x| x.into()).collect()))
                .collect(),
            features2: None,
            cksum,
            yanked: None,
            links: new_crate.links.map(|x| x.into()),
            rust_version: None,
            v: Some(2),
        })?;

        let file =
            cargo_util::registry::make_dep_path(&package.name().as_str().to_lowercase(), false);
        let mut dst = self.index_path().open_rw_exclusive_create(
            file,
            self.gctx,
            "temporary package registry",
        )?;
        dst.write_all(index_line.as_bytes())?;
        Ok(())
    }
}