cargo/ops/cargo_package/
mod.rs

use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::fs::{self, File};
use std::io::prelude::*;
use std::io::SeekFrom;
use std::path::{Path, PathBuf};
use std::task::Poll;

use crate::core::dependency::DepKind;
use crate::core::manifest::Target;
use crate::core::resolver::CliFeatures;
use crate::core::resolver::HasDevUnits;
use crate::core::PackageIdSpecQuery;
use crate::core::Shell;
use crate::core::Verbosity;
use crate::core::Workspace;
use crate::core::{Package, PackageId, PackageSet, Resolve, SourceId};
use crate::ops::lockfile::LOCKFILE_NAME;
use crate::ops::registry::{infer_registry, RegistryOrIndex};
use crate::sources::path::PathEntry;
use crate::sources::registry::index::{IndexPackage, RegistryDependency};
use crate::sources::{PathSource, CRATES_IO_REGISTRY};
use crate::util::cache_lock::CacheLockMode;
use crate::util::context::JobsConfig;
use crate::util::errors::CargoResult;
use crate::util::restricted_names;
use crate::util::toml::prepare_for_publish;
use crate::util::FileLock;
use crate::util::Filesystem;
use crate::util::GlobalContext;
use crate::util::Graph;
use crate::util::HumanBytes;
use crate::{drop_println, ops};
use anyhow::{bail, Context as _};
use cargo_util::paths;
use cargo_util_schemas::messages;
use flate2::{Compression, GzBuilder};
use tar::{Builder, EntryType, Header, HeaderMode};
use tracing::debug;
use unicase::Ascii as UncasedAscii;

mod vcs;
mod verify;

/// Message format for `cargo package`.
///
/// Currently this only affects the output of the `--list` flag.
#[derive(Debug, Clone)]
pub enum PackageMessageFormat {
    Human,
    Json,
}

impl PackageMessageFormat {
    pub const POSSIBLE_VALUES: [&str; 2] = ["human", "json"];

    pub const DEFAULT: &str = "human";
}

impl std::str::FromStr for PackageMessageFormat {
    type Err = anyhow::Error;

    fn from_str(s: &str) -> Result<PackageMessageFormat, anyhow::Error> {
        match s {
            "human" => Ok(PackageMessageFormat::Human),
            "json" => Ok(PackageMessageFormat::Json),
            f => bail!("unknown message format `{f}`"),
        }
    }
}

#[derive(Clone)]
pub struct PackageOpts<'gctx> {
    pub gctx: &'gctx GlobalContext,
    pub list: bool,
    pub fmt: PackageMessageFormat,
    pub check_metadata: bool,
    pub allow_dirty: bool,
    pub include_lockfile: bool,
    pub verify: bool,
    pub jobs: Option<JobsConfig>,
    pub keep_going: bool,
    pub to_package: ops::Packages,
    pub targets: Vec<String>,
    pub cli_features: CliFeatures,
    pub reg_or_index: Option<ops::RegistryOrIndex>,
}

const ORIGINAL_MANIFEST_FILE: &str = "Cargo.toml.orig";
const VCS_INFO_FILE: &str = ".cargo_vcs_info.json";

struct ArchiveFile {
    /// The relative path in the archive (not including the top-level package
    /// name directory).
    rel_path: PathBuf,
    /// String variant of `rel_path`, for convenience.
    rel_str: String,
    /// The contents to add to the archive.
    contents: FileContents,
}

enum FileContents {
    /// Absolute path to the file on disk to add to the archive.
    OnDisk(PathBuf),
    /// Generates a file.
    Generated(GeneratedFile),
}

enum GeneratedFile {
    /// Generates `Cargo.toml` by rewriting the original.
    ///
    /// Associated path is the original manifest path.
    Manifest(PathBuf),
    /// Generates `Cargo.lock`.
    ///
    /// Associated path is the path to the original lock file, if it exists.
    Lockfile(Option<PathBuf>),
    /// Adds a `.cargo_vcs_info.json` file if in a git repo.
    VcsInfo(vcs::VcsInfo),
}

// Builds a tarball and places it in the output directory.
#[tracing::instrument(skip_all)]
fn create_package(
    ws: &Workspace<'_>,
    pkg: &Package,
    ar_files: Vec<ArchiveFile>,
    local_reg: Option<&TmpRegistry<'_>>,
) -> CargoResult<FileLock> {
    let gctx = ws.gctx();
    let filecount = ar_files.len();

    // Check that the package dependencies are safe to deploy.
    for dep in pkg.dependencies() {
        super::check_dep_has_version(dep, false)?;
    }

    let filename = pkg.package_id().tarball_name();
    let dir = ws.target_dir().join("package");
    let mut dst = {
        let tmp = format!(".{}", filename);
        dir.open_rw_exclusive_create(&tmp, gctx, "package scratch space")?
    };

    // Package up and test a temporary tarball and only move it to the final
    // location if it actually passes all our tests. Any previously existing
    // tarball can be assumed to be corrupt or invalid, so we just blow it away if
    // it exists.
    gctx.shell()
        .status("Packaging", pkg.package_id().to_string())?;
    dst.file().set_len(0)?;
    let uncompressed_size = tar(ws, pkg, local_reg, ar_files, dst.file(), &filename)
        .context("failed to prepare local package for uploading")?;

    dst.seek(SeekFrom::Start(0))?;
    let src_path = dst.path();
    let dst_path = dst.parent().join(&filename);
    fs::rename(&src_path, &dst_path)
        .context("failed to move temporary tarball into final location")?;

    let dst_metadata = dst
        .file()
        .metadata()
        .with_context(|| format!("could not learn metadata for: `{}`", dst_path.display()))?;
    let compressed_size = dst_metadata.len();

    let uncompressed = HumanBytes(uncompressed_size);
    let compressed = HumanBytes(compressed_size);

    let message = format!("{filecount} files, {uncompressed:.1} ({compressed:.1} compressed)");
    // It doesn't really matter if this fails.
    drop(gctx.shell().status("Packaged", message));

    Ok(dst)
}

/// Packages an entire workspace.
///
/// Returns the generated package files. If `opts.list` is true, skips
/// generating package files and returns an empty list.
pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Vec<FileLock>> {
    let specs = &opts.to_package.to_package_id_specs(ws)?;
    // If -p is used, we should check that the spec matches one of the members (see #13719).
    if let ops::Packages::Packages(_) = opts.to_package {
        for spec in specs.iter() {
            let member_ids = ws.members().map(|p| p.package_id());
            spec.query(member_ids)?;
        }
    }
    let mut pkgs = ws.members_with_features(specs, &opts.cli_features)?;

    // `members_with_features_old` will add the "current" package (determined by
    // the cwd), so we need to filter down to the requested specs.
    pkgs.retain(|(pkg, _feats)| specs.iter().any(|spec| spec.matches(pkg.package_id())));

    Ok(do_package(ws, opts, pkgs)?
        .into_iter()
        .map(|x| x.2)
        .collect())
}

/// Packages an entire workspace.
///
/// Returns the generated package files and the dependencies between them. If
/// `opts.list` is true, skips generating package files and returns an empty
/// list.
pub(crate) fn package_with_dep_graph(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'_>,
    pkgs: Vec<(&Package, CliFeatures)>,
) -> CargoResult<LocalDependencies<(CliFeatures, FileLock)>> {
    let output = do_package(ws, opts, pkgs)?;

    Ok(local_deps(output.into_iter().map(
        |(pkg, opts, tarball)| (pkg, (opts.cli_features, tarball)),
    )))
}

fn do_package<'a>(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'a>,
    pkgs: Vec<(&Package, CliFeatures)>,
) -> CargoResult<Vec<(Package, PackageOpts<'a>, FileLock)>> {
    if ws
        .lock_root()
        .as_path_unlocked()
        .join(LOCKFILE_NAME)
        .exists()
        && opts.include_lockfile
    {
        // Make sure the Cargo.lock is up-to-date and valid.
        let dry_run = false;
        let _ = ops::resolve_ws(ws, dry_run)?;
        // If Cargo.lock does not exist, it will be generated by `build_lock`
        // below, and will be validated during the verification step.
    }

    let deps = local_deps(pkgs.iter().map(|(p, f)| ((*p).clone(), f.clone())));
    let just_pkgs: Vec<_> = pkgs.iter().map(|p| p.0).collect();

    // The publish registry doesn't matter unless there are local dependencies,
    // so only try to get one if we need it. If they explicitly passed a
    // registry on the CLI, we check it no matter what.
    let sid = if deps.has_no_dependencies() && opts.reg_or_index.is_none() {
        None
    } else {
        let sid = get_registry(ws.gctx(), &just_pkgs, opts.reg_or_index.clone())?;
        debug!("packaging for registry {}", sid);
        Some(sid)
    };

    let mut local_reg = if ws.gctx().cli_unstable().package_workspace {
        let reg_dir = ws.build_dir().join("package").join("tmp-registry");
        sid.map(|sid| TmpRegistry::new(ws.gctx(), reg_dir, sid))
            .transpose()?
    } else {
        None
    };

    // Packages need to be created in dependency order, because dependencies must
    // be added to our local overlay before we can create lockfiles that depend on them.
    let sorted_pkgs = deps.sort();
    let mut outputs: Vec<(Package, PackageOpts<'_>, FileLock)> = Vec::new();
    for (pkg, cli_features) in sorted_pkgs {
        let opts = PackageOpts {
            cli_features: cli_features.clone(),
            to_package: ops::Packages::Default,
            ..opts.clone()
        };
        let ar_files = prepare_archive(ws, &pkg, &opts)?;

        if opts.list {
            match opts.fmt {
                PackageMessageFormat::Human => {
                    // While this form is called "human",
                    // it keeps the old file-per-line format for compatibility.
                    for ar_file in &ar_files {
                        drop_println!(ws.gctx(), "{}", ar_file.rel_str);
                    }
                }
                PackageMessageFormat::Json => {
                    let message = messages::PackageList {
                        id: pkg.package_id().to_spec(),
                        files: BTreeMap::from_iter(ar_files.into_iter().map(|f| {
                            let file = match f.contents {
                                FileContents::OnDisk(path) => messages::PackageFile::Copy { path },
                                FileContents::Generated(
                                    GeneratedFile::Manifest(path)
                                    | GeneratedFile::Lockfile(Some(path)),
                                ) => messages::PackageFile::Generate { path: Some(path) },
                                FileContents::Generated(
                                    GeneratedFile::VcsInfo(_) | GeneratedFile::Lockfile(None),
                                ) => messages::PackageFile::Generate { path: None },
                            };
                            (f.rel_path, file)
                        })),
                    };
                    let _ = ws.gctx().shell().print_json(&message);
                }
            }
        } else {
            let tarball = create_package(ws, &pkg, ar_files, local_reg.as_ref())?;
            if let Some(local_reg) = local_reg.as_mut() {
                if pkg.publish() != &Some(Vec::new()) {
                    local_reg.add_package(ws, &pkg, &tarball)?;
                }
            }
            outputs.push((pkg, opts, tarball));
        }
    }

    // Verify all packages in the workspace. This can be done in any order, since the dependencies
    // are already all in the local registry overlay.
    if opts.verify {
        for (pkg, opts, tarball) in &outputs {
            verify::run_verify(ws, pkg, tarball, local_reg.as_ref(), opts)
                .context("failed to verify package tarball")?
        }
    }

    Ok(outputs)
}

/// Determine which registry the packages are for.
///
/// The registry only affects the built packages if there are dependencies within the
/// packages that we're packaging: if we're packaging foo-bin and foo-lib, and foo-bin
/// depends on foo-lib, then the foo-lib entry in foo-bin's lockfile will depend on the
/// registry that we're building packages for.
fn get_registry(
    gctx: &GlobalContext,
    pkgs: &[&Package],
    reg_or_index: Option<RegistryOrIndex>,
) -> CargoResult<SourceId> {
    let reg_or_index = match reg_or_index.clone() {
        Some(r) => Some(r),
        None => infer_registry(pkgs)?,
    };

    // Validate the registry against the packages' allow-lists.
    let reg = reg_or_index
        .clone()
        .unwrap_or_else(|| RegistryOrIndex::Registry(CRATES_IO_REGISTRY.to_owned()));
    if let RegistryOrIndex::Registry(reg_name) = reg {
        for pkg in pkgs {
            if let Some(allowed) = pkg.publish().as_ref() {
                // If allowed is empty (i.e. package.publish is false), we let it slide.
                // This allows packaging unpublishable packages (although packaging might
                // fail later if the unpublishable package is a dependency of something else).
                if !allowed.is_empty() && !allowed.iter().any(|a| a == &reg_name) {
                    bail!(
                        "`{}` cannot be packaged.\n\
                         The registry `{}` is not listed in the `package.publish` value in Cargo.toml.",
                        pkg.name(),
                        reg_name
                    );
                }
            }
        }
    }
    Ok(ops::registry::get_source_id(gctx, reg_or_index.as_ref())?.replacement)
}

/// Just the part of the dependency graph that's between the packages we're packaging.
#[derive(Clone, Debug, Default)]
pub(crate) struct LocalDependencies<T> {
    pub packages: HashMap<PackageId, (Package, T)>,
    pub graph: Graph<PackageId, ()>,
}

impl<T: Clone> LocalDependencies<T> {
    pub fn sort(&self) -> Vec<(Package, T)> {
        self.graph
            .sort()
            .into_iter()
            .map(|name| self.packages[&name].clone())
            .collect()
    }

    pub fn has_no_dependencies(&self) -> bool {
        self.graph
            .iter()
            .all(|node| self.graph.edges(node).next().is_none())
    }
}

/// Build just the part of the dependency graph that's between the given packages,
/// ignoring dev dependencies.
///
/// We assume that the packages all belong to this workspace.
fn local_deps<T>(packages: impl Iterator<Item = (Package, T)>) -> LocalDependencies<T> {
    let packages: HashMap<PackageId, (Package, T)> = packages
        .map(|(pkg, payload)| (pkg.package_id(), (pkg, payload)))
        .collect();

    // Dependencies have source ids but not package ids. We draw an edge
    // whenever a dependency's source id matches one of our packages. This is
    // wrong in general because it doesn't require (e.g.) versions to match. But
    // since we're working only with path dependencies here, it should be fine.
    let source_to_pkg: HashMap<_, _> = packages
        .keys()
        .map(|pkg_id| (pkg_id.source_id(), *pkg_id))
        .collect();

    let mut graph = Graph::new();
    for (pkg, _payload) in packages.values() {
        graph.add(pkg.package_id());
        for dep in pkg.dependencies() {
            // Ignore local dev-dependencies because they aren't needed for intra-workspace
            // lockfile generation or verification as they get stripped on publish.
            if dep.kind() == DepKind::Development || !dep.source_id().is_path() {
                continue;
            };

            if let Some(dep_pkg) = source_to_pkg.get(&dep.source_id()) {
                graph.link(pkg.package_id(), *dep_pkg);
            }
        }
    }

    LocalDependencies { packages, graph }
}

/// Performs pre-archiving checks and builds a list of files to archive.
#[tracing::instrument(skip_all)]
fn prepare_archive(
    ws: &Workspace<'_>,
    pkg: &Package,
    opts: &PackageOpts<'_>,
) -> CargoResult<Vec<ArchiveFile>> {
    let gctx = ws.gctx();
    let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), gctx);
    src.load()?;

    if opts.check_metadata {
        check_metadata(pkg, gctx)?;
    }

    if !pkg.manifest().exclude().is_empty() && !pkg.manifest().include().is_empty() {
        gctx.shell().warn(
            "both package.include and package.exclude are specified; \
             the exclude list will be ignored",
        )?;
    }
    let src_files = src.list_files(pkg)?;

    // Check (git) repository state, getting the current commit hash.
    let vcs_info = vcs::check_repo_state(pkg, &src_files, ws, &opts)?;

    build_ar_list(ws, pkg, src_files, vcs_info, opts.include_lockfile)
}

/// Builds list of files to archive.
#[tracing::instrument(skip_all)]
fn build_ar_list(
    ws: &Workspace<'_>,
    pkg: &Package,
    src_files: Vec<PathEntry>,
    vcs_info: Option<vcs::VcsInfo>,
    include_lockfile: bool,
) -> CargoResult<Vec<ArchiveFile>> {
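    // Collect candidate files keyed by case-insensitive name, so that entries
    // like `Cargo.toml` can be looked up and normalized below regardless of how
    // a (possibly case-insensitive) filesystem reports their case.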
    let mut result = HashMap::new();
    let root = pkg.root();
    for src_file in &src_files {
        let rel_path = src_file.strip_prefix(&root)?;
        check_filename(rel_path, &mut ws.gctx().shell())?;
        let rel_str = rel_path.to_str().ok_or_else(|| {
            anyhow::format_err!("non-utf8 path in source directory: {}", rel_path.display())
        })?;
        match rel_str {
            "Cargo.lock" => continue,
            VCS_INFO_FILE | ORIGINAL_MANIFEST_FILE => anyhow::bail!(
                "invalid inclusion of reserved file name {} in package source",
                rel_str
            ),
            _ => {
                result
                    .entry(UncasedAscii::new(rel_str))
                    .or_insert_with(Vec::new)
                    .push(ArchiveFile {
                        rel_path: rel_path.to_owned(),
                        rel_str: rel_str.to_owned(),
                        contents: FileContents::OnDisk(src_file.to_path_buf()),
                    });
            }
        }
    }

    // Normalize for case-insensitive filesystems (like on Windows) by removing the
    // existing entry, regardless of case, and re-adding it with the correct case.
    if result.remove(&UncasedAscii::new("Cargo.toml")).is_some() {
        result
            .entry(UncasedAscii::new(ORIGINAL_MANIFEST_FILE))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(ORIGINAL_MANIFEST_FILE),
                rel_str: ORIGINAL_MANIFEST_FILE.to_string(),
                contents: FileContents::OnDisk(pkg.manifest_path().to_owned()),
            });
        result
            .entry(UncasedAscii::new("Cargo.toml"))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from("Cargo.toml"),
                rel_str: "Cargo.toml".to_string(),
                contents: FileContents::Generated(GeneratedFile::Manifest(
                    pkg.manifest_path().to_owned(),
                )),
            });
    } else {
        ws.gctx().shell().warn(&format!(
            "no `Cargo.toml` file found when packaging `{}` (note the case of the file name).",
            pkg.name()
        ))?;
    }

    if include_lockfile {
        let lockfile_path = ws.lock_root().as_path_unlocked().join(LOCKFILE_NAME);
        let lockfile_path = lockfile_path.exists().then_some(lockfile_path);
        let rel_str = "Cargo.lock";
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::Lockfile(lockfile_path)),
            });
    }

    if let Some(vcs_info) = vcs_info {
        let rel_str = VCS_INFO_FILE;
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::VcsInfo(vcs_info)),
            });
    }

    let mut invalid_manifest_field: Vec<String> = vec![];

    let mut result = result.into_values().flatten().collect();
    if let Some(license_file) = &pkg.manifest().metadata().license_file {
        let license_path = Path::new(license_file);
        let abs_file_path = paths::normalize_path(&pkg.root().join(license_path));
        if abs_file_path.is_file() {
            check_for_file_and_add(
                "license-file",
                license_path,
                abs_file_path,
                pkg,
                &mut result,
                ws,
            )?;
        } else {
            error_on_nonexistent_file(
                &pkg,
                &license_path,
                "license-file",
                &mut invalid_manifest_field,
            );
        }
    }
    if let Some(readme) = &pkg.manifest().metadata().readme {
        let readme_path = Path::new(readme);
        let abs_file_path = paths::normalize_path(&pkg.root().join(readme_path));
        if abs_file_path.is_file() {
            check_for_file_and_add("readme", readme_path, abs_file_path, pkg, &mut result, ws)?;
        } else {
            error_on_nonexistent_file(&pkg, &readme_path, "readme", &mut invalid_manifest_field);
        }
    }

    if !invalid_manifest_field.is_empty() {
        return Err(anyhow::anyhow!(invalid_manifest_field.join("\n")));
    }

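    // A custom build script (`build.rs` or an explicit `build` path) must exist
    // on disk and live inside the package root; otherwise the packaged crate
    // could not be built, so reject it before archiving.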
    for t in pkg
        .manifest()
        .targets()
        .iter()
        .filter(|t| t.is_custom_build())
    {
        if let Some(custom_build_path) = t.src_path().path() {
            let abs_custom_build_path =
                paths::normalize_path(&pkg.root().join(custom_build_path));
            if !abs_custom_build_path.is_file() || !abs_custom_build_path.starts_with(pkg.root())
            {
                error_custom_build_file_not_in_package(pkg, &abs_custom_build_path, t)?;
            }
        }
    }

    result.sort_unstable_by(|a, b| a.rel_path.cmp(&b.rel_path));

    Ok(result)
}

fn check_for_file_and_add(
    label: &str,
    file_path: &Path,
    abs_file_path: PathBuf,
    pkg: &Package,
    result: &mut Vec<ArchiveFile>,
    ws: &Workspace<'_>,
) -> CargoResult<()> {
    match abs_file_path.strip_prefix(&pkg.root()) {
        Ok(rel_file_path) => {
            if !result.iter().any(|ar| ar.rel_path == rel_file_path) {
                result.push(ArchiveFile {
                    rel_path: rel_file_path.to_path_buf(),
                    rel_str: rel_file_path
                        .to_str()
                        .expect("everything was utf8")
                        .to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
        Err(_) => {
            // The file exists somewhere outside of the package.
            let file_name = file_path.file_name().unwrap();
            if result.iter().any(|ar| ar.rel_path == file_name) {
                ws.gctx().shell().warn(&format!(
                    "{} `{}` appears to be a path outside of the package, \
                            but there is already a file named `{}` in the root of the package. \
                            The archived crate will contain the copy in the root of the package. \
                            Update the {} to point to the path relative \
                            to the root of the package to remove this warning.",
                    label,
                    file_path.display(),
                    file_name.to_str().unwrap(),
                    label,
                ))?;
            } else {
                result.push(ArchiveFile {
                    rel_path: PathBuf::from(file_name),
                    rel_str: file_name.to_str().unwrap().to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
    }
    Ok(())
}

fn error_on_nonexistent_file(
    pkg: &Package,
    path: &Path,
    manifest_key_name: &'static str,
    invalid: &mut Vec<String>,
) {
    let rel_msg = if path.is_absolute() {
        "".to_string()
    } else {
        format!(" (relative to `{}`)", pkg.root().display())
    };

    let msg = format!(
        "{manifest_key_name} `{}` does not appear to exist{}.\n\
                Please update the {manifest_key_name} setting in the manifest at `{}`.",
        path.display(),
        rel_msg,
        pkg.manifest_path().display()
    );

    invalid.push(msg);
}

fn error_custom_build_file_not_in_package(
    pkg: &Package,
    path: &Path,
    target: &Target,
) -> CargoResult<Vec<ArchiveFile>> {
    let tip = {
        let description_name = target.description_named();
        if path.is_file() {
            format!("the source file of {description_name} doesn't appear to be a path inside of the package.\n\
            It is at `{}`, whereas the root of the package is `{}`.\n",
            path.display(), pkg.root().display()
            )
        } else {
            format!("the source file of {description_name} doesn't appear to exist.\n",)
        }
    };
    let msg = format!(
        "{}\
        This may cause issues during packaging, as module resolution and resources included via macros are often relative to the path of source files.\n\
        Please update the `build` setting in the manifest at `{}` and point to a path inside the root of the package.",
        tip, pkg.manifest_path().display()
    );
    anyhow::bail!(msg)
}

/// Construct `Cargo.lock` for the package to be published.
fn build_lock(
    ws: &Workspace<'_>,
    publish_pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
) -> CargoResult<String> {
    let gctx = ws.gctx();
    let orig_resolve = ops::load_pkg_lockfile(ws)?;

    let mut tmp_ws = Workspace::ephemeral(publish_pkg.clone(), ws.gctx(), None, true)?;

    // The local registry is an overlay used for simulating workspace packages
    // that are supposed to be in the published registry, but that aren't there
    // yet.
    if let Some(local_reg) = local_reg {
        tmp_ws.add_local_overlay(
            local_reg.upstream,
            local_reg.root.as_path_unlocked().to_owned(),
        );
    }
    let mut tmp_reg = tmp_ws.package_registry()?;

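    // Resolve the ephemeral workspace, seeding the resolver with the original
    // lockfile so that previously locked versions are reused where possible.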
    let mut new_resolve = ops::resolve_with_previous(
        &mut tmp_reg,
        &tmp_ws,
        &CliFeatures::new_all(true),
        HasDevUnits::Yes,
        orig_resolve.as_ref(),
        None,
        &[],
        true,
    )?;

    let pkg_set = ops::get_resolved_packages(&new_resolve, tmp_reg)?;

    if let Some(orig_resolve) = orig_resolve {
        compare_resolve(gctx, tmp_ws.current()?, &orig_resolve, &new_resolve)?;
    }
    check_yanked(
        gctx,
        &pkg_set,
        &new_resolve,
        "consider updating to a version that is not yanked",
    )?;

    ops::resolve_to_string(&tmp_ws, &mut new_resolve)
}

// Checks that the package has some piece of metadata that a human can
// use to tell what the package is about.
fn check_metadata(pkg: &Package, gctx: &GlobalContext) -> CargoResult<()> {
    let md = pkg.manifest().metadata();

    let mut missing = vec![];

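    // Each `a || b || ...` group below expands to a single check: if *all*
    // fields in the group are unset or empty, every field name in the group
    // (with `_` replaced by `-`) is reported as missing. For example,
    // `lacking!(license || license_file)` expands to roughly:
    //   if license is unset && license_file is unset {
    //       missing.push("license"); missing.push("license-file");
    //   }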
    macro_rules! lacking {
        ($( $($field: ident)||* ),*) => {{
            $(
                if $(md.$field.as_ref().map_or(true, |s| s.is_empty()))&&* {
                    $(missing.push(stringify!($field).replace("_", "-"));)*
                }
            )*
        }}
    }
    lacking!(
        description,
        license || license_file,
        documentation || homepage || repository
    );

    if !missing.is_empty() {
        let mut things = missing[..missing.len() - 1].join(", ");
        // `things` will be empty if and only if `missing` has length 1 (i.e., the
        // only case with no `or`).
        if !things.is_empty() {
            things.push_str(" or ");
        }
        things.push_str(missing.last().unwrap());

        gctx.shell().warn(&format!(
            "manifest has no {things}.\n\
             See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.",
            things = things
        ))?
    }

    Ok(())
}

/// Compresses and packages a list of [`ArchiveFile`]s and writes the result into the given file.
///
/// Returns the uncompressed size of the contents of the new archive file.
fn tar(
    ws: &Workspace<'_>,
    pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
    ar_files: Vec<ArchiveFile>,
    dst: &File,
    filename: &str,
) -> CargoResult<u64> {
    // Prepare the encoder and its header.
    let filename = Path::new(filename);
    let encoder = GzBuilder::new()
        .filename(paths::path2bytes(filename)?)
        .write(dst, Compression::best());

    // Put all package files into a compressed archive.
    let mut ar = Builder::new(encoder);
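    // Don't emit GNU sparse-file entries; plain entries keep the `.crate` file
    // readable by any tar implementation.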
    ar.sparse(false);
    let gctx = ws.gctx();

    let base_name = format!("{}-{}", pkg.name(), pkg.version());
    let base_path = Path::new(&base_name);
    let included = ar_files
        .iter()
        .map(|ar_file| ar_file.rel_path.clone())
        .collect::<Vec<_>>();
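    // Prepare a publish-ready copy of the package, passing along the list of
    // files that will actually be archived.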
    let publish_pkg = prepare_for_publish(pkg, ws, Some(&included))?;

    let mut uncompressed_size = 0;
    for ar_file in ar_files {
        let ArchiveFile {
            rel_path,
            rel_str,
            contents,
        } = ar_file;
        let ar_path = base_path.join(&rel_path);
        gctx.shell()
            .verbose(|shell| shell.status("Archiving", &rel_str))?;
        let mut header = Header::new_gnu();
        match contents {
            FileContents::OnDisk(disk_path) => {
                let mut file = File::open(&disk_path).with_context(|| {
                    format!("failed to open for archiving: `{}`", disk_path.display())
                })?;
                let metadata = file.metadata().with_context(|| {
                    format!("could not learn metadata for: `{}`", disk_path.display())
                })?;
                header.set_metadata_in_mode(&metadata, HeaderMode::Deterministic);
                header.set_cksum();
                ar.append_data(&mut header, &ar_path, &mut file)
                    .with_context(|| {
                        format!("could not archive source file `{}`", disk_path.display())
                    })?;
                uncompressed_size += metadata.len() as u64;
            }
            FileContents::Generated(generated_kind) => {
                let contents = match generated_kind {
                    GeneratedFile::Manifest(_) => {
                        publish_pkg.manifest().to_normalized_contents()?
                    }
                    GeneratedFile::Lockfile(_) => build_lock(ws, &publish_pkg, local_reg)?,
                    GeneratedFile::VcsInfo(ref s) => serde_json::to_string_pretty(s)?,
                };
                header.set_entry_type(EntryType::file());
                header.set_mode(0o644);
                header.set_size(contents.len() as u64);
                // use something nonzero to avoid rust-lang/cargo#9512
                header.set_mtime(1);
                header.set_cksum();
                ar.append_data(&mut header, &ar_path, contents.as_bytes())
                    .with_context(|| format!("could not archive source file `{}`", rel_str))?;
                uncompressed_size += contents.len() as u64;
            }
        }
    }

    let encoder = ar.into_inner()?;
    encoder.finish()?;
    Ok(uncompressed_size)
}

/// Generates warnings when packaging `Cargo.lock` if the resolve has changed.
fn compare_resolve(
    gctx: &GlobalContext,
    current_pkg: &Package,
    orig_resolve: &Resolve,
    new_resolve: &Resolve,
) -> CargoResult<()> {
    if gctx.shell().verbosity() != Verbosity::Verbose {
        return Ok(());
    }
    let new_set: BTreeSet<PackageId> = new_resolve.iter().collect();
    let orig_set: BTreeSet<PackageId> = orig_resolve.iter().collect();
    let added = new_set.difference(&orig_set);
    // Removed entries are ignored; this is used to quickly find hints for why
    // an entry changed.
    let removed: Vec<&PackageId> = orig_set.difference(&new_set).collect();
    for pkg_id in added {
        if pkg_id.name() == current_pkg.name() && pkg_id.version() == current_pkg.version() {
            // Skip the package that is being created, since its SourceId
            // (directory) changes.
            continue;
        }
        // Check for candidates where the source has changed (such as [patch]
        // or a dependency with multiple sources like path/version).
        let removed_candidates: Vec<&PackageId> = removed
            .iter()
            .filter(|orig_pkg_id| {
                orig_pkg_id.name() == pkg_id.name() && orig_pkg_id.version() == pkg_id.version()
            })
            .cloned()
            .collect();
        let extra = match removed_candidates.len() {
            0 => {
                // This can happen if the original was out of date.
                let previous_versions: Vec<&PackageId> = removed
                    .iter()
                    .filter(|orig_pkg_id| orig_pkg_id.name() == pkg_id.name())
                    .cloned()
                    .collect();
                match previous_versions.len() {
                    0 => String::new(),
                    1 => format!(
                        ", previous version was `{}`",
                        previous_versions[0].version()
                    ),
                    _ => format!(
                        ", previous versions were: {}",
                        previous_versions
                            .iter()
                            .map(|pkg_id| format!("`{}`", pkg_id.version()))
                            .collect::<Vec<_>>()
                            .join(", ")
                    ),
                }
            }
            1 => {
                // This can happen for multi-sourced dependencies like
                // `{path="...", version="..."}` or `[patch]` replacement.
                // `[replace]` is not captured in Cargo.lock.
                format!(
                    ", was originally sourced from `{}`",
                    removed_candidates[0].source_id()
                )
            }
            _ => {
                // I don't know if there is a way to actually trigger this,
                // but handle it just in case.
                let comma_list = removed_candidates
                    .iter()
                    .map(|pkg_id| format!("`{}`", pkg_id.source_id()))
                    .collect::<Vec<_>>()
                    .join(", ");
                format!(
                    ", was originally sourced from one of these sources: {}",
                    comma_list
                )
            }
        };
        let msg = format!(
            "package `{}` added to the packaged Cargo.lock file{}",
            pkg_id, extra
        );
        gctx.shell().note(msg)?;
    }
    Ok(())
}

pub fn check_yanked(
    gctx: &GlobalContext,
    pkg_set: &PackageSet<'_>,
    resolve: &Resolve,
    hint: &str,
) -> CargoResult<()> {
    // Checking the yanked status involves taking a look at the registry and
    // maybe updating files, so be sure to lock it here.
    let _lock = gctx.acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?;

    let mut sources = pkg_set.sources_mut();
    let mut pending: Vec<PackageId> = resolve.iter().collect();
    let mut results = Vec::new();
    for (_id, source) in sources.sources_mut() {
        source.invalidate_cache();
    }
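    // Poll every pending package's source for its yanked status. Sources that
    // return `Poll::Pending` keep the package in `pending`; block until all
    // sources are ready, then poll again until nothing remains pending.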
    while !pending.is_empty() {
        pending.retain(|pkg_id| {
            if let Some(source) = sources.get_mut(pkg_id.source_id()) {
                match source.is_yanked(*pkg_id) {
                    Poll::Ready(result) => results.push((*pkg_id, result)),
                    Poll::Pending => return true,
                }
            }
            false
        });
        for (_id, source) in sources.sources_mut() {
            source.block_until_ready()?;
        }
    }

    for (pkg_id, is_yanked) in results {
        if is_yanked? {
            gctx.shell().warn(format!(
                "package `{}` in Cargo.lock is yanked in registry `{}`, {}",
                pkg_id,
                pkg_id.source_id().display_registry_name(),
                hint
            ))?;
        }
    }
    Ok(())
}

// It can often be the case that files of a particular name on one platform
// can't actually be created on another platform. For example, files with colons
// in the name are allowed on Unix but not on Windows.
//
// To help out in situations like this, issue a warning about weird filenames
// when packaging, as a "heads up" that something may not work on other platforms.
fn check_filename(file: &Path, shell: &mut Shell) -> CargoResult<()> {
    let Some(name) = file.file_name() else {
        return Ok(());
    };
    let Some(name) = name.to_str() else {
        anyhow::bail!(
            "path does not have a unicode filename which may not unpack \
             on all platforms: {}",
            file.display()
        )
    };
    let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*'];
    if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) {
        anyhow::bail!(
            "cannot package a filename with a special character `{}`: {}",
            c,
            file.display()
        )
    }
    if restricted_names::is_windows_reserved_path(file) {
        shell.warn(format!(
            "file {} is a reserved Windows filename, \
                it will not work on Windows platforms",
            file.display()
        ))?;
    }
    Ok(())
}

/// Manages a temporary local registry that we use to overlay our new packages on the
/// upstream registry. This way we can build lockfiles that depend on the new packages even
/// before they're published.
struct TmpRegistry<'a> {
    gctx: &'a GlobalContext,
    upstream: SourceId,
    root: Filesystem,
    _lock: FileLock,
}

impl<'a> TmpRegistry<'a> {
    fn new(gctx: &'a GlobalContext, root: Filesystem, upstream: SourceId) -> CargoResult<Self> {
        root.create_dir()?;
        let _lock = root.open_rw_exclusive_create(".cargo-lock", gctx, "temporary registry")?;
        let slf = Self {
            gctx,
            root,
            upstream,
            _lock,
        };
        // If there's an old temporary registry, delete it.
        let index_path = slf.index_path().into_path_unlocked();
        if index_path.exists() {
            paths::remove_dir_all(index_path)?;
        }
        slf.index_path().create_dir()?;
        Ok(slf)
    }

    fn index_path(&self) -> Filesystem {
        self.root.join("index")
    }

    fn add_package(
        &mut self,
        ws: &Workspace<'_>,
        package: &Package,
        tar: &FileLock,
    ) -> CargoResult<()> {
        debug!(
            "adding package {}@{} to local overlay at {}",
            package.name(),
            package.version(),
            self.root.as_path_unlocked().display()
        );
        {
            let mut tar_copy = self.root.open_rw_exclusive_create(
                package.package_id().tarball_name(),
                self.gctx,
                "temporary package registry",
            )?;
            tar.file().seek(SeekFrom::Start(0))?;
            std::io::copy(&mut tar.file(), &mut tar_copy)?;
            tar_copy.flush()?;
        }

        let new_crate = super::registry::prepare_transmit(self.gctx, ws, package, self.upstream)?;

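        // Hash the tarball; the index entry written below records its sha256 checksum.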
        tar.file().seek(SeekFrom::Start(0))?;
        let cksum = cargo_util::Sha256::new()
            .update_file(tar.file())?
            .finish_hex();

        let deps: Vec<_> = new_crate
            .deps
            .into_iter()
            .map(|dep| {
                let name = dep
                    .explicit_name_in_toml
                    .clone()
                    .unwrap_or_else(|| dep.name.clone())
                    .into();
                let package = dep
                    .explicit_name_in_toml
                    .as_ref()
                    .map(|_| dep.name.clone().into());
                RegistryDependency {
                    name,
                    req: dep.version_req.into(),
                    features: dep.features.into_iter().map(|x| x.into()).collect(),
                    optional: dep.optional,
                    default_features: dep.default_features,
                    target: dep.target.map(|x| x.into()),
                    kind: Some(dep.kind.into()),
                    registry: dep.registry.map(|x| x.into()),
                    package,
                    public: None,
                    artifact: dep
                        .artifact
                        .map(|xs| xs.into_iter().map(|x| x.into()).collect()),
                    bindep_target: dep.bindep_target.map(|x| x.into()),
                    lib: dep.lib,
                }
            })
            .collect();

        let index_line = serde_json::to_string(&IndexPackage {
            name: new_crate.name.into(),
            vers: package.version().clone(),
            deps,
            features: new_crate
                .features
                .into_iter()
                .map(|(k, v)| (k.into(), v.into_iter().map(|x| x.into()).collect()))
                .collect(),
            features2: None,
            cksum,
            yanked: None,
            links: new_crate.links.map(|x| x.into()),
            rust_version: None,
            v: Some(2),
        })?;

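        // Write the index entry at the standard registry index path for the
        // crate name (e.g. `se/rd/serde` for `serde`).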
        let file =
            cargo_util::registry::make_dep_path(&package.name().as_str().to_lowercase(), false);
        let mut dst = self.index_path().open_rw_exclusive_create(
            file,
            self.gctx,
            "temporary package registry",
        )?;
        dst.write_all(index_line.as_bytes())?;
        Ok(())
    }
}