From f6289f90f96751df3a6b6bda100772ced342a706 Mon Sep 17 00:00:00 2001
From: Yuri Astrakhan
Date: Sat, 28 Sep 2024 14:34:25 -0400
Subject: [PATCH] Migrate Cargo's &Option<T> into Option<&T>

As part of this, also modify `Option<&String>` to `Option<&str>`, and the same
for `PathBuf`.

This tries out my new lint (https://github.com/rust-lang/rust-clippy/pull/13336);
according to the [video](https://www.youtube.com/watch?v=6c7pZYP_iIE), the change
could lead to some performance and memory optimizations.

Basic points from the video that seem to make sense:

* `&Option<T>` in an API breaks encapsulation:
  * the caller must own `T` and move it into an `Option` just to make the call
  * if returned, the owner must store the value as an `Option` internally in
    order to return it
* Performance is subject to compiler optimization, but at a basic level
  `&Option<T>` points to memory holding a presence flag plus the value, whereas
  `Option<&T>` is guaranteed by the specification to be optimized to a single
  pointer.

(Two short sketches illustrating these points are appended after the diff.)
---
 Cargo.lock                                    |  2 +-
 .../benches/global_cache_tracker.rs           |  2 +-
 .../benchsuite/src/bin/capture-last-use.rs    |  2 +-
 benches/benchsuite/src/lib.rs                 |  2 +-
 crates/cargo-util-schemas/src/manifest/mod.rs | 46 ++++++++++-------
 crates/resolver-tests/tests/resolve.rs        |  6 +--
 crates/xtask-bump-check/src/xtask.rs          |  4 +-
 .../cargo-credential-1password/Cargo.toml     |  2 +-
 .../cargo-credential-1password/src/main.rs    | 31 +++++++-----
 src/bin/cargo/cli.rs                          |  2 +-
 src/cargo/core/compiler/build_plan.rs         |  8 +--
 src/cargo/core/compiler/custom_build.rs       | 12 ++---
 src/cargo/core/compiler/job_queue/mod.rs      | 13 +++--
 src/cargo/core/compiler/timings.rs            |  6 +--
 src/cargo/core/manifest.rs                    |  4 +-
 src/cargo/core/package.rs                     |  4 +-
 src/cargo/core/profiles.rs                    |  4 +-
 src/cargo/core/registry.rs                    |  6 +--
 src/cargo/core/source_id.rs                   | 14 +++---
 src/cargo/core/workspace.rs                   | 20 ++++----
 src/cargo/ops/cargo_compile/mod.rs            |  2 +-
 src/cargo/ops/cargo_compile/unit_generator.rs |  7 ++-
 src/cargo/ops/cargo_package.rs                |  2 +-
 src/cargo/ops/cargo_test.rs                   |  8 +--
 src/cargo/ops/registry/mod.rs                 |  6 +--
 src/cargo/ops/registry/publish.rs             |  2 +-
 src/cargo/util/command_prelude.rs             |  2 +-
 src/cargo/util/context/mod.rs                 | 21 +++++---
 src/cargo/util/diagnostic_server.rs           |  8 +--
 src/cargo/util/lints.rs                       | 14 +++---
 src/cargo/util/toml/mod.rs                    | 50 +++++++++----------
 src/cargo/util/toml/targets.rs                | 10 ++--
 tests/testsuite/config.rs                     |  2 +-
 33 files changed, 174 insertions(+), 150 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index cb62034320c..930474881a5 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -359,7 +359,7 @@ dependencies = [
 
 [[package]]
 name = "cargo-credential-1password"
-version = "0.4.4"
+version = "0.5.0"
 dependencies = [
  "cargo-credential",
  "serde",
diff --git a/benches/benchsuite/benches/global_cache_tracker.rs b/benches/benchsuite/benches/global_cache_tracker.rs
index f879b07eb41..d28be0bb66f 100644
--- a/benches/benchsuite/benches/global_cache_tracker.rs
+++ b/benches/benchsuite/benches/global_cache_tracker.rs
@@ -43,7 +43,7 @@ fn initialize_context() -> GlobalContext {
         false,
         false,
         false,
-        &None,
+        None,
         &["gc".to_string()],
         &[],
     )
diff --git a/benches/benchsuite/src/bin/capture-last-use.rs b/benches/benchsuite/src/bin/capture-last-use.rs
index dc226109baa..0bfe895a549 100644
--- a/benches/benchsuite/src/bin/capture-last-use.rs
+++ b/benches/benchsuite/src/bin/capture-last-use.rs
@@ -36,7 +36,7 @@ fn main() {
         false,
         false,
         false,
-        &None,
+        None,
         &["gc".to_string()],
         &[],
     )
diff --git a/benches/benchsuite/src/lib.rs b/benches/benchsuite/src/lib.rs
index 327c04c4f05..271396c4310 100644
---
a/benches/benchsuite/src/lib.rs +++ b/benches/benchsuite/src/lib.rs @@ -188,7 +188,7 @@ impl Fixtures { false, false, false, - &Some(self.target_dir()), + Some(&self.target_dir()), &[], &[], ) diff --git a/crates/cargo-util-schemas/src/manifest/mod.rs b/crates/cargo-util-schemas/src/manifest/mod.rs index 0fa5be5c12d..21a461aa606 100644 --- a/crates/cargo-util-schemas/src/manifest/mod.rs +++ b/crates/cargo-util-schemas/src/manifest/mod.rs @@ -239,8 +239,11 @@ impl TomlPackage { } } - pub fn normalized_edition(&self) -> Result, UnresolvedError> { - self.edition.as_ref().map(|v| v.normalized()).transpose() + pub fn normalized_edition(&self) -> Result, UnresolvedError> { + self.edition + .as_ref() + .map(|v| v.normalized().map(String::as_str)) + .transpose() } pub fn normalized_rust_version(&self) -> Result, UnresolvedError> { @@ -258,7 +261,7 @@ impl TomlPackage { self.authors.as_ref().map(|v| v.normalized()).transpose() } - pub fn normalized_build(&self) -> Result, UnresolvedError> { + pub fn normalized_build(&self) -> Result, UnresolvedError> { let readme = self.build.as_ref().ok_or(UnresolvedError)?; match readme { StringOrBool::Bool(false) => Ok(None), @@ -279,30 +282,33 @@ impl TomlPackage { self.publish.as_ref().map(|v| v.normalized()).transpose() } - pub fn normalized_description(&self) -> Result, UnresolvedError> { + pub fn normalized_description(&self) -> Result, UnresolvedError> { self.description .as_ref() - .map(|v| v.normalized()) + .map(|v| v.normalized().map(String::as_str)) .transpose() } - pub fn normalized_homepage(&self) -> Result, UnresolvedError> { - self.homepage.as_ref().map(|v| v.normalized()).transpose() + pub fn normalized_homepage(&self) -> Result, UnresolvedError> { + self.homepage + .as_ref() + .map(|v| v.normalized().map(String::as_str)) + .transpose() } - pub fn normalized_documentation(&self) -> Result, UnresolvedError> { + pub fn normalized_documentation(&self) -> Result, UnresolvedError> { self.documentation .as_ref() - .map(|v| v.normalized()) + .map(|v| v.normalized().map(String::as_str)) .transpose() } - pub fn normalized_readme(&self) -> Result, UnresolvedError> { + pub fn normalized_readme(&self) -> Result, UnresolvedError> { let readme = self.readme.as_ref().ok_or(UnresolvedError)?; readme.normalized().and_then(|sb| match sb { StringOrBool::Bool(false) => Ok(None), StringOrBool::Bool(true) => Err(UnresolvedError), - StringOrBool::String(value) => Ok(Some(value)), + StringOrBool::String(value) => Ok(Some(value.as_str())), }) } @@ -314,19 +320,25 @@ impl TomlPackage { self.categories.as_ref().map(|v| v.normalized()).transpose() } - pub fn normalized_license(&self) -> Result, UnresolvedError> { - self.license.as_ref().map(|v| v.normalized()).transpose() + pub fn normalized_license(&self) -> Result, UnresolvedError> { + self.license + .as_ref() + .map(|v| v.normalized().map(String::as_str)) + .transpose() } - pub fn normalized_license_file(&self) -> Result, UnresolvedError> { + pub fn normalized_license_file(&self) -> Result, UnresolvedError> { self.license_file .as_ref() - .map(|v| v.normalized()) + .map(|v| v.normalized().map(String::as_str)) .transpose() } - pub fn normalized_repository(&self) -> Result, UnresolvedError> { - self.repository.as_ref().map(|v| v.normalized()).transpose() + pub fn normalized_repository(&self) -> Result, UnresolvedError> { + self.repository + .as_ref() + .map(|v| v.normalized().map(String::as_str)) + .transpose() } } diff --git a/crates/resolver-tests/tests/resolve.rs b/crates/resolver-tests/tests/resolve.rs index 
5ac0a83dcbb..57192d27aff 100644 --- a/crates/resolver-tests/tests/resolve.rs +++ b/crates/resolver-tests/tests/resolve.rs @@ -69,7 +69,7 @@ proptest! { false, false, false, - &None, + None, &["minimal-versions".to_string()], &[], ) @@ -110,7 +110,7 @@ proptest! { false, false, false, - &None, + None, &["direct-minimal-versions".to_string()], &[], ) @@ -436,7 +436,7 @@ fn test_resolving_minimum_version_with_transitive_deps() { false, false, false, - &None, + None, &["minimal-versions".to_string()], &[], ) diff --git a/crates/xtask-bump-check/src/xtask.rs b/crates/xtask-bump-check/src/xtask.rs index 1b131383dad..02af284e17e 100644 --- a/crates/xtask-bump-check/src/xtask.rs +++ b/crates/xtask-bump-check/src/xtask.rs @@ -98,7 +98,7 @@ fn global_context_configure(gctx: &mut GlobalContext, args: &ArgMatches) -> CliR frozen, locked, offline, - &None, + None, &unstable_flags, &config_args, )?; @@ -333,7 +333,7 @@ fn changed<'r, 'ws>( let ws_members = ws .members() .filter(|pkg| pkg.name() != root_pkg_name) // Only take care of sub crates here. - .filter(|pkg| pkg.publish() != &Some(vec![])) // filter out `publish = false` + .filter(|pkg| pkg.publish() != Some(&vec![])) // filter out `publish = false` .map(|pkg| { // Having relative package root path so that we can compare with // paths of changed files to determine which package has changed. diff --git a/credential/cargo-credential-1password/Cargo.toml b/credential/cargo-credential-1password/Cargo.toml index 144b44070db..6a870205d41 100644 --- a/credential/cargo-credential-1password/Cargo.toml +++ b/credential/cargo-credential-1password/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "cargo-credential-1password" -version = "0.4.4" +version = "0.5.0" rust-version.workspace = true edition.workspace = true license.workspace = true diff --git a/credential/cargo-credential-1password/src/main.rs b/credential/cargo-credential-1password/src/main.rs index 38b567bf2d4..676a62fc2df 100644 --- a/credential/cargo-credential-1password/src/main.rs +++ b/credential/cargo-credential-1password/src/main.rs @@ -114,7 +114,7 @@ impl OnePasswordKeychain { Ok(Some(buffer)) } - fn make_cmd(&self, session: &Option, args: &[&str]) -> Command { + fn make_cmd(&self, session: Option<&str>, args: &[&str]) -> Command { let mut cmd = Command::new("op"); cmd.args(args); if let Some(account) = &self.account { @@ -153,7 +153,7 @@ impl OnePasswordKeychain { Ok(buffer) } - fn search(&self, session: &Option, index_url: &str) -> Result, Error> { + fn search(&self, session: Option<&str>, index_url: &str) -> Result, Error> { let cmd = self.make_cmd( session, &[ @@ -192,7 +192,7 @@ impl OnePasswordKeychain { fn modify( &self, - session: &Option, + session: Option<&str>, id: &str, token: Secret<&str>, _name: Option<&str>, @@ -207,7 +207,7 @@ impl OnePasswordKeychain { fn create( &self, - session: &Option, + session: Option<&str>, index_url: &str, token: Secret<&str>, name: Option<&str>, @@ -235,7 +235,7 @@ impl OnePasswordKeychain { Ok(()) } - fn get_token(&self, session: &Option, id: &str) -> Result, Error> { + fn get_token(&self, session: Option<&str>, id: &str) -> Result, Error> { let cmd = self.make_cmd(session, &["item", "get", "--format=json", id]); let buffer = self.run_cmd(cmd)?; let item: Login = serde_json::from_str(&buffer) @@ -250,7 +250,7 @@ impl OnePasswordKeychain { } } - fn delete(&self, session: &Option, id: &str) -> Result<(), Error> { + fn delete(&self, session: Option<&str>, id: &str) -> Result<(), Error> { let cmd = self.make_cmd(session, &["item", "delete", id]); 
self.run_cmd(cmd)?; Ok(()) @@ -270,8 +270,8 @@ impl Credential for OnePasswordCredential { match action { Action::Get(_) => { let session = op.signin()?; - if let Some(id) = op.search(&session, registry.index_url)? { - op.get_token(&session, &id) + if let Some(id) = op.search(session.as_deref(), registry.index_url)? { + op.get_token(session.as_deref(), &id) .map(|token| CredentialResponse::Get { token, cache: CacheControl::Session, @@ -284,21 +284,26 @@ impl Credential for OnePasswordCredential { Action::Login(options) => { let session = op.signin()?; // Check if an item already exists. - if let Some(id) = op.search(&session, registry.index_url)? { + if let Some(id) = op.search(session.as_deref(), registry.index_url)? { eprintln!("note: token already exists for `{}`", registry.index_url); let token = cargo_credential::read_token(options, registry)?; - op.modify(&session, &id, token.as_deref(), None)?; + op.modify(session.as_deref(), &id, token.as_deref(), None)?; } else { let token = cargo_credential::read_token(options, registry)?; - op.create(&session, registry.index_url, token.as_deref(), None)?; + op.create( + session.as_deref(), + registry.index_url, + token.as_deref(), + None, + )?; } Ok(CredentialResponse::Login) } Action::Logout => { let session = op.signin()?; // Check if an item already exists. - if let Some(id) = op.search(&session, registry.index_url)? { - op.delete(&session, &id)?; + if let Some(id) = op.search(session.as_deref(), registry.index_url)? { + op.delete(session.as_deref(), &id)?; Ok(CredentialResponse::Logout) } else { Err(Error::NotFound) diff --git a/src/bin/cargo/cli.rs b/src/bin/cargo/cli.rs index 9cc0401a31c..70dc478fcdc 100644 --- a/src/bin/cargo/cli.rs +++ b/src/bin/cargo/cli.rs @@ -436,7 +436,7 @@ fn configure_gctx( frozen, locked, offline, - arg_target_dir, + arg_target_dir.as_deref(), &unstable_flags, &config_args, )?; diff --git a/src/cargo/core/compiler/build_plan.rs b/src/cargo/core/compiler/build_plan.rs index d116c07b5be..63c371331cd 100644 --- a/src/cargo/core/compiler/build_plan.rs +++ b/src/cargo/core/compiler/build_plan.rs @@ -64,10 +64,10 @@ impl Invocation { } } - pub fn add_output(&mut self, path: &Path, link: &Option) { + pub fn add_output(&mut self, path: &Path, link: Option<&Path>) { self.outputs.push(path.to_path_buf()); - if let Some(ref link) = *link { - self.links.insert(link.clone(), path.to_path_buf()); + if let Some(link) = link { + self.links.insert(link.to_path_buf(), path.to_path_buf()); } } @@ -134,7 +134,7 @@ impl BuildPlan { invocation.update_cmd(cmd)?; for output in outputs.iter() { - invocation.add_output(&output.path, &output.hardlink); + invocation.add_output(&output.path, output.hardlink.as_deref()); } Ok(()) diff --git a/src/cargo/core/compiler/custom_build.rs b/src/cargo/core/compiler/custom_build.rs index f36e741c65b..66bd85a88a1 100644 --- a/src/cargo/core/compiler/custom_build.rs +++ b/src/cargo/core/compiler/custom_build.rs @@ -555,7 +555,7 @@ fn build_work(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResul &script_out_dir, nightly_features_allowed, &targets, - &msrv, + msrv.as_ref(), )?; if json_messages { @@ -583,7 +583,7 @@ fn build_work(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResul &script_out_dir, nightly_features_allowed, &targets_fresh, - &msrv_fresh, + msrv_fresh.as_ref(), )?, }; @@ -639,7 +639,7 @@ impl BuildOutput { script_out_dir: &Path, nightly_features_allowed: bool, targets: &[Target], - msrv: &Option, + msrv: Option<&RustVersion>, ) -> CargoResult { let contents = 
paths::read_bytes(path)?; BuildOutput::parse( @@ -667,7 +667,7 @@ impl BuildOutput { script_out_dir: &Path, nightly_features_allowed: bool, targets: &[Target], - msrv: &Option, + msrv: Option<&RustVersion>, ) -> CargoResult { let mut library_paths = Vec::new(); let mut library_links = Vec::new(); @@ -717,7 +717,7 @@ impl BuildOutput { fn check_minimum_supported_rust_version_for_new_syntax( pkg_descr: &str, - msrv: &Option, + msrv: Option<&RustVersion>, flag: &str, ) -> CargoResult<()> { if let Some(msrv) = msrv { @@ -1250,7 +1250,7 @@ fn prev_build_output( &script_out_dir, build_runner.bcx.gctx.nightly_features_allowed, unit.pkg.targets(), - &unit.pkg.rust_version().cloned(), + unit.pkg.rust_version(), ) .ok(), prev_script_out_dir, diff --git a/src/cargo/core/compiler/job_queue/mod.rs b/src/cargo/core/compiler/job_queue/mod.rs index 6c55697c1a0..82729b2ed20 100644 --- a/src/cargo/core/compiler/job_queue/mod.rs +++ b/src/cargo/core/compiler/job_queue/mod.rs @@ -117,7 +117,7 @@ use std::cell::RefCell; use std::collections::{HashMap, HashSet}; use std::fmt::Write as _; use std::io; -use std::path::{Path, PathBuf}; +use std::path::Path; use std::sync::Arc; use std::thread::{self, Scope}; use std::time::Duration; @@ -499,7 +499,7 @@ impl<'gctx> JobQueue<'gctx> { pending_queue: Vec::new(), print: DiagnosticPrinter::new( build_runner.bcx.gctx, - &build_runner.bcx.rustc().workspace_wrapper, + build_runner.bcx.rustc().workspace_wrapper.as_deref(), ), finished: 0, per_package_future_incompat_reports: Vec::new(), @@ -661,7 +661,7 @@ impl<'gctx> DrainState<'gctx> { self.report_warning_count( build_runner.bcx.gctx, id, - &build_runner.bcx.rustc().workspace_wrapper, + build_runner.bcx.rustc().workspace_wrapper.as_deref(), ); self.active.remove(&id).unwrap() } @@ -807,7 +807,10 @@ impl<'gctx> DrainState<'gctx> { } let time_elapsed = util::elapsed(build_runner.bcx.gctx.creation_time().elapsed()); - if let Err(e) = self.timings.finished(build_runner, &errors.to_error()) { + if let Err(e) = self + .timings + .finished(build_runner, errors.to_error().as_ref()) + { self.handle_error(&mut build_runner.bcx.gctx.shell(), &mut errors, e); } if build_runner.bcx.build_config.emit_json() { @@ -1021,7 +1024,7 @@ impl<'gctx> DrainState<'gctx> { &mut self, gctx: &GlobalContext, id: JobId, - rustc_workspace_wrapper: &Option, + rustc_workspace_wrapper: Option<&Path>, ) { let count = match self.warning_count.remove(&id) { // An error could add an entry for a `Unit` diff --git a/src/cargo/core/compiler/timings.rs b/src/cargo/core/compiler/timings.rs index 05d37632e5d..2fea39a2d80 100644 --- a/src/cargo/core/compiler/timings.rs +++ b/src/cargo/core/compiler/timings.rs @@ -289,7 +289,7 @@ impl<'gctx> Timings<'gctx> { pub fn finished( &mut self, build_runner: &BuildRunner<'_, '_>, - error: &Option, + error: Option<&anyhow::Error>, ) -> CargoResult<()> { if !self.enabled { return Ok(()); @@ -308,7 +308,7 @@ impl<'gctx> Timings<'gctx> { fn report_html( &self, build_runner: &BuildRunner<'_, '_>, - error: &Option, + error: Option<&anyhow::Error>, ) -> CargoResult<()> { let duration = self.start.elapsed().as_secs_f64(); let timestamp = self.start_str.replace(&['-', ':'][..], ""); @@ -362,7 +362,7 @@ impl<'gctx> Timings<'gctx> { f: &mut impl Write, duration: f64, bcx: &BuildContext<'_, '_>, - error: &Option, + error: Option<&anyhow::Error>, ) -> CargoResult<()> { let targets: Vec = self .root_targets diff --git a/src/cargo/core/manifest.rs b/src/cargo/core/manifest.rs index d63dbd61de3..7dbb33741ec 100644 --- 
a/src/cargo/core/manifest.rs +++ b/src/cargo/core/manifest.rs @@ -555,8 +555,8 @@ impl Manifest { pub fn profiles(&self) -> Option<&TomlProfiles> { self.normalized_toml.profile.as_ref() } - pub fn publish(&self) -> &Option> { - &self.publish + pub fn publish(&self) -> Option<&Vec> { + self.publish.as_ref() } pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] { &self.replace diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs index ac1bcdc5cb7..de8da58975d 100644 --- a/src/cargo/core/package.rs +++ b/src/cargo/core/package.rs @@ -160,7 +160,7 @@ impl Package { /// Returns `None` if the package is set to publish. /// Returns `Some(allowed_registries)` if publishing is limited to specified /// registries or if package is set to not publish. - pub fn publish(&self) -> &Option> { + pub fn publish(&self) -> Option<&Vec> { self.manifest().publish() } /// Returns `true` if this package is a proc-macro. @@ -250,7 +250,7 @@ impl Package { edition: self.manifest().edition().to_string(), links: self.manifest().links().map(|s| s.to_owned()), metabuild: self.manifest().metabuild().cloned(), - publish: self.publish().as_ref().cloned(), + publish: self.publish().cloned(), default_run: self.manifest().default_run().map(|s| s.to_owned()), rust_version: self.rust_version().cloned(), } diff --git a/src/cargo/core/profiles.rs b/src/cargo/core/profiles.rs index ce2c9e414f6..0866a1b9aef 100644 --- a/src/cargo/core/profiles.rs +++ b/src/cargo/core/profiles.rs @@ -372,7 +372,7 @@ impl Profiles { { continue; } - let found = validate_packages_unique(resolve, name, &profile.toml)?; + let found = validate_packages_unique(resolve, name, profile.toml.as_ref())?; // We intentionally do not validate unmatched packages for config // profiles, in case they are defined in a central location. This // iterates over the manifest profiles only. @@ -1330,7 +1330,7 @@ fn get_config_profile(ws: &Workspace<'_>, name: &str) -> CargoResult, + toml: Option<&TomlProfile>, ) -> CargoResult> { let Some(toml) = toml else { return Ok(HashSet::new()); diff --git a/src/cargo/core/registry.rs b/src/cargo/core/registry.rs index 37af3d7a130..72d0530f39f 100644 --- a/src/cargo/core/registry.rs +++ b/src/cargo/core/registry.rs @@ -414,7 +414,7 @@ impl<'gctx> PackageRegistry<'gctx> { let summaries = summaries.into_iter().map(|s| s.into_summary()).collect(); let (summary, should_unlock) = - match summary_for_patch(orig_patch, &locked, summaries, source) { + match summary_for_patch(orig_patch, locked.as_ref(), summaries, source) { Poll::Ready(x) => x, Poll::Pending => { patch_deps_pending.push(patch_dep_remaining); @@ -918,7 +918,7 @@ fn lock( /// via the original patch, so we need to inform the resolver to "unlock" it. fn summary_for_patch( orig_patch: &Dependency, - locked: &Option, + locked: Option<&LockedPatchDependency>, mut summaries: Vec, source: &mut dyn Source, ) -> Poll)>> { @@ -964,7 +964,7 @@ fn summary_for_patch( let orig_matches = orig_matches.into_iter().map(|s| s.into_summary()).collect(); - let summary = ready!(summary_for_patch(orig_patch, &None, orig_matches, source))?; + let summary = ready!(summary_for_patch(orig_patch, None, orig_matches, source))?; return Poll::Ready(Ok((summary.0, Some(locked.package_id)))); } // Try checking if there are *any* packages that match this by name. 
diff --git a/src/cargo/core/source_id.rs b/src/cargo/core/source_id.rs index 53eb7f16f7e..e09d17c543f 100644 --- a/src/cargo/core/source_id.rs +++ b/src/cargo/core/source_id.rs @@ -468,30 +468,30 @@ impl SourceId { /// Creates a new `SourceId` from this source with the given `precise`. pub fn with_git_precise(self, fragment: Option) -> SourceId { - self.with_precise(&fragment.map(|f| Precise::GitUrlFragment(f))) + self.with_precise(fragment.map(|f| Precise::GitUrlFragment(f))) } /// Creates a new `SourceId` from this source without a `precise`. pub fn without_precise(self) -> SourceId { - self.with_precise(&None) + self.with_precise(None) } /// Creates a new `SourceId` from this source without a `precise`. pub fn with_locked_precise(self) -> SourceId { - self.with_precise(&Some(Precise::Locked)) + self.with_precise(Some(Precise::Locked)) } /// Creates a new `SourceId` from this source with the `precise` from some other `SourceId`. pub fn with_precise_from(self, v: Self) -> SourceId { - self.with_precise(&v.inner.precise) + self.with_precise(v.inner.precise.clone()) } - fn with_precise(self, precise: &Option) -> SourceId { - if &self.inner.precise == precise { + fn with_precise(self, precise: Option) -> SourceId { + if self.inner.precise == precise { self } else { SourceId::wrap(SourceIdInner { - precise: precise.clone(), + precise, ..(*self.inner).clone() }) } diff --git a/src/cargo/core/workspace.rs b/src/cargo/core/workspace.rs index d4871fef203..cc551cf3769 100644 --- a/src/cargo/core/workspace.rs +++ b/src/cargo/core/workspace.rs @@ -1852,19 +1852,19 @@ impl WorkspaceRootConfig { /// Creates a new Intermediate Workspace Root configuration. pub fn new( root_dir: &Path, - members: &Option>, - default_members: &Option>, - exclude: &Option>, - inheritable: &Option, - custom_metadata: &Option, + members: Option<&Vec>, + default_members: Option<&Vec>, + exclude: Option<&Vec>, + inheritable: Option<&InheritableFields>, + custom_metadata: Option<&toml::Value>, ) -> WorkspaceRootConfig { WorkspaceRootConfig { root_dir: root_dir.to_path_buf(), - members: members.clone(), - default_members: default_members.clone(), - exclude: exclude.clone().unwrap_or_default(), - inheritable_fields: inheritable.clone().unwrap_or_default(), - custom_metadata: custom_metadata.clone(), + members: members.cloned(), + default_members: default_members.cloned(), + exclude: exclude.cloned().unwrap_or_default(), + inheritable_fields: inheritable.cloned().unwrap_or_default(), + custom_metadata: custom_metadata.cloned(), } } /// Checks the path against the `excluded` list. 
diff --git a/src/cargo/ops/cargo_compile/mod.rs b/src/cargo/ops/cargo_compile/mod.rs index 17aaa922b8a..1f61d069b40 100644 --- a/src/cargo/ops/cargo_compile/mod.rs +++ b/src/cargo/ops/cargo_compile/mod.rs @@ -370,7 +370,7 @@ pub fn create_bcx<'a, 'gctx>( explicit_host_kind, mode: build_config.mode, resolve: &resolve, - workspace_resolve: &workspace_resolve, + workspace_resolve: workspace_resolve.as_ref(), resolved_features: &resolved_features, package_set: &pkg_set, profiles: &profiles, diff --git a/src/cargo/ops/cargo_compile/unit_generator.rs b/src/cargo/ops/cargo_compile/unit_generator.rs index ce10e173c6c..e978a6e0397 100644 --- a/src/cargo/ops/cargo_compile/unit_generator.rs +++ b/src/cargo/ops/cargo_compile/unit_generator.rs @@ -53,7 +53,7 @@ pub(super) struct UnitGenerator<'a, 'gctx> { pub explicit_host_kind: CompileKind, pub mode: CompileMode, pub resolve: &'a Resolve, - pub workspace_resolve: &'a Option, + pub workspace_resolve: Option<&'a Resolve>, pub resolved_features: &'a features::ResolvedFeatures, pub package_set: &'a PackageSet<'gctx>, pub profiles: &'a Profiles, @@ -564,9 +564,8 @@ Rustdoc did not scrape the following examples because they require dev-dependenc required_features: &[String], summary: &Summary, ) -> CargoResult<()> { - let resolve = match self.workspace_resolve { - None => return Ok(()), - Some(resolve) => resolve, + let Some(resolve) = self.workspace_resolve else { + return Ok(()); }; let mut shell = self.ws.gctx().shell(); diff --git a/src/cargo/ops/cargo_package.rs b/src/cargo/ops/cargo_package.rs index 04708584027..0c6456a345d 100644 --- a/src/cargo/ops/cargo_package.rs +++ b/src/cargo/ops/cargo_package.rs @@ -251,7 +251,7 @@ fn do_package<'a>( } else { let tarball = create_package(ws, &pkg, ar_files, local_reg.as_ref())?; if let Some(local_reg) = local_reg.as_mut() { - if pkg.publish() != &Some(Vec::new()) { + if pkg.publish() != Some(Vec::new()).as_ref() { local_reg.add_package(ws, &pkg, &tarball)?; } } diff --git a/src/cargo/ops/cargo_test.rs b/src/cargo/ops/cargo_test.rs index 1144b31c63c..29c41687d67 100644 --- a/src/cargo/ops/cargo_test.rs +++ b/src/cargo/ops/cargo_test.rs @@ -133,7 +133,7 @@ fn run_unit_tests( cwd, unit, path, - script_meta, + script_meta.as_ref(), test_args, compilation, "unittests", @@ -331,7 +331,7 @@ fn display_no_run_information( cwd, unit, path, - script_meta, + script_meta.as_ref(), test_args, compilation, exec_type, @@ -355,7 +355,7 @@ fn cmd_builds( cwd: &Path, unit: &Unit, path: &PathBuf, - script_meta: &Option, + script_meta: Option<&Metadata>, test_args: &[&str], compilation: &Compilation<'_>, exec_type: &str, @@ -380,7 +380,7 @@ fn cmd_builds( ), }; - let mut cmd = compilation.target_process(path, unit.kind, &unit.pkg, *script_meta)?; + let mut cmd = compilation.target_process(path, unit.kind, &unit.pkg, script_meta.cloned())?; cmd.args(test_args); if unit.target.harness() && gctx.shell().verbosity() == Verbosity::Quiet { cmd.arg("--quiet"); diff --git a/src/cargo/ops/registry/mod.rs b/src/cargo/ops/registry/mod.rs index ea49a5dfa3b..e926d466311 100644 --- a/src/cargo/ops/registry/mod.rs +++ b/src/cargo/ops/registry/mod.rs @@ -326,7 +326,7 @@ pub(crate) fn infer_registry(pkgs: &[&Package]) -> CargoResult = pkgs .iter() - .filter(|p| p.publish() != &Some(Vec::new())) + .filter(|p| p.publish() != Some(Vec::new()).as_ref()) .collect(); let Some((first, rest)) = publishable_pkgs.split_first() else { @@ -335,7 +335,7 @@ pub(crate) fn infer_registry(pkgs: &[&Package]) -> CargoResult { 
Ok(Some(RegistryOrIndex::Registry(unique_pkg_reg.to_owned()))) } @@ -358,7 +358,7 @@ pub(crate) fn infer_registry(pkgs: &[&Package]) -> CargoResult>()) .reduce(|xs, ys| xs.intersection(&ys).cloned().collect()) .unwrap_or_default(); diff --git a/src/cargo/ops/registry/publish.rs b/src/cargo/ops/registry/publish.rs index 8ff92920c16..b93f4165367 100644 --- a/src/cargo/ops/registry/publish.rs +++ b/src/cargo/ops/registry/publish.rs @@ -690,7 +690,7 @@ fn package_list(pkgs: impl IntoIterator, final_sep: &str) -> S fn validate_registry(pkgs: &[&Package], reg_or_index: Option<&RegistryOrIndex>) -> CargoResult<()> { for pkg in pkgs { - if pkg.publish() == &Some(Vec::new()) { + if pkg.publish() == Some(Vec::new()).as_ref() { bail!( "`{}` cannot be published.\n\ `package.publish` must be set to `true` or a non-empty list in Cargo.toml to publish.", diff --git a/src/cargo/util/command_prelude.rs b/src/cargo/util/command_prelude.rs index 247143319f9..262c809e7d0 100644 --- a/src/cargo/util/command_prelude.rs +++ b/src/cargo/util/command_prelude.rs @@ -1319,7 +1319,7 @@ fn new_gctx_for_completions() -> CargoResult { frozen, locked, offline, - &target_dir, + target_dir, unstable_flags, cli_config, )?; diff --git a/src/cargo/util/context/mod.rs b/src/cargo/util/context/mod.rs index c38fd6fd55a..12ed7cf3c27 100644 --- a/src/cargo/util/context/mod.rs +++ b/src/cargo/util/context/mod.rs @@ -415,7 +415,9 @@ impl GlobalContext { /// Gets the path to the `rustdoc` executable. pub fn rustdoc(&self) -> CargoResult<&Path> { self.rustdoc - .try_borrow_with(|| Ok(self.get_tool(Tool::Rustdoc, &self.build_config()?.rustdoc))) + .try_borrow_with(|| { + Ok(self.get_tool(Tool::Rustdoc, self.build_config()?.rustdoc.as_ref())) + }) .map(AsRef::as_ref) } @@ -426,14 +428,15 @@ impl GlobalContext { .join(".rustc_info.json") .into_path_unlocked() }); - let wrapper = self.maybe_get_tool("rustc_wrapper", &self.build_config()?.rustc_wrapper); + let wrapper = + self.maybe_get_tool("rustc_wrapper", self.build_config()?.rustc_wrapper.as_ref()); let rustc_workspace_wrapper = self.maybe_get_tool( "rustc_workspace_wrapper", - &self.build_config()?.rustc_workspace_wrapper, + self.build_config()?.rustc_workspace_wrapper.as_ref(), ); Rustc::new( - self.get_tool(Tool::Rustc, &self.build_config()?.rustc), + self.get_tool(Tool::Rustc, self.build_config()?.rustc.as_ref()), wrapper, rustc_workspace_wrapper, &self @@ -1016,7 +1019,7 @@ impl GlobalContext { frozen: bool, locked: bool, offline: bool, - target_dir: &Option, + target_dir: Option<&Path>, unstable_flags: &[String], cli_config: &[String], ) -> CargoResult<()> { @@ -1093,7 +1096,9 @@ impl GlobalContext { .ok() .and_then(|n| n.offline) .unwrap_or(false); - let cli_target_dir = target_dir.as_ref().map(|dir| Filesystem::new(dir.clone())); + let cli_target_dir = target_dir + .as_ref() + .map(|dir| Filesystem::new(dir.to_path_buf())); self.target_dir = cli_target_dir; Ok(()) @@ -1710,7 +1715,7 @@ impl GlobalContext { fn maybe_get_tool( &self, tool: &str, - from_config: &Option, + from_config: Option<&ConfigRelativePath>, ) -> Option { let var = tool.to_uppercase(); @@ -1739,7 +1744,7 @@ impl GlobalContext { /// /// This is intended for tools that are rustup proxies. If you need to get /// a tool that is not a rustup proxy, use `maybe_get_tool` instead. 
- fn get_tool(&self, tool: Tool, from_config: &Option) -> PathBuf { + fn get_tool(&self, tool: Tool, from_config: Option<&ConfigRelativePath>) -> PathBuf { let tool_str = tool.as_str(); self.maybe_get_tool(tool_str, from_config) .or_else(|| { diff --git a/src/cargo/util/diagnostic_server.rs b/src/cargo/util/diagnostic_server.rs index 93b378c54b8..37bb86671a8 100644 --- a/src/cargo/util/diagnostic_server.rs +++ b/src/cargo/util/diagnostic_server.rs @@ -4,7 +4,7 @@ use std::collections::HashSet; use std::io::{BufReader, Read, Write}; use std::net::{Shutdown, SocketAddr, TcpListener, TcpStream}; -use std::path::PathBuf; +use std::path::Path; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; use std::thread::{self, JoinHandle}; @@ -83,7 +83,7 @@ pub struct DiagnosticPrinter<'a> { /// This is used to get the correct bug report URL. For instance, /// if `clippy-driver` is set as the value for the wrapper, /// then the correct bug report URL for `clippy` can be obtained. - workspace_wrapper: &'a Option, + workspace_wrapper: Option<&'a Path>, // A set of messages that have already been printed. dedupe: HashSet, } @@ -91,7 +91,7 @@ pub struct DiagnosticPrinter<'a> { impl<'a> DiagnosticPrinter<'a> { pub fn new( gctx: &'a GlobalContext, - workspace_wrapper: &'a Option, + workspace_wrapper: Option<&'a Path>, ) -> DiagnosticPrinter<'a> { DiagnosticPrinter { gctx, @@ -243,7 +243,7 @@ fn gen_please_report_this_bug_text(url: &str) -> String { ) } -fn get_bug_report_url(rustc_workspace_wrapper: &Option) -> &str { +fn get_bug_report_url(rustc_workspace_wrapper: Option<&Path>) -> &str { let clippy = std::ffi::OsStr::new("clippy-driver"); let issue_link = match rustc_workspace_wrapper.as_ref().and_then(|x| x.file_stem()) { Some(wrapper) if wrapper == clippy => "https://github.com/rust-lang/rust-clippy/issues", diff --git a/src/cargo/util/lints.rs b/src/cargo/util/lints.rs index 4d1b5b6de88..7a236bf7f25 100644 --- a/src/cargo/util/lints.rs +++ b/src/cargo/util/lints.rs @@ -45,7 +45,7 @@ pub fn analyze_cargo_lints_table( let (_, reason, _) = level_priority( name, *default_level, - *edition_lint_opts, + edition_lint_opts.cloned(), pkg_lints, manifest.edition(), ); @@ -97,22 +97,22 @@ fn find_lint_or_group<'a>( ) -> Option<( &'static str, &LintLevel, - &Option<(Edition, LintLevel)>, - &Option<&'static Feature>, + Option<&(Edition, LintLevel)>, + Option<&'static Feature>, )> { if let Some(lint) = LINTS.iter().find(|l| l.name == name) { Some(( lint.name, &lint.default_level, - &lint.edition_lint_opts, - &lint.feature_gate, + lint.edition_lint_opts.as_ref(), + lint.feature_gate, )) } else if let Some(group) = LINT_GROUPS.iter().find(|g| g.name == name) { Some(( group.name, &group.default_level, - &group.edition_lint_opts, - &group.feature_gate, + group.edition_lint_opts.as_ref(), + group.feature_gate, )) } else { None diff --git a/src/cargo/util/toml/mod.rs b/src/cargo/util/toml/mod.rs index 54bd34d5ef8..6bbc6e9f099 100644 --- a/src/cargo/util/toml/mod.rs +++ b/src/cargo/util/toml/mod.rs @@ -254,11 +254,11 @@ fn to_workspace_root_config( }; let ws_root_config = WorkspaceRootConfig::new( package_root, - &normalized_toml.members, - &normalized_toml.default_members, - &normalized_toml.exclude, - &Some(inheritable), - &normalized_toml.metadata, + normalized_toml.members.as_ref(), + normalized_toml.default_members.as_ref(), + normalized_toml.exclude.as_ref(), + Some(&inheritable), + normalized_toml.metadata.as_ref(), ); ws_root_config } @@ -399,8 +399,8 @@ fn normalize_toml( warnings, )?; 
deprecated_underscore( - &original_toml.dev_dependencies2, - &original_toml.dev_dependencies, + original_toml.dev_dependencies2.as_ref(), + original_toml.dev_dependencies.as_ref(), "dev-dependencies", package_name, "package", @@ -420,8 +420,8 @@ fn normalize_toml( warnings, )?; deprecated_underscore( - &original_toml.build_dependencies2, - &original_toml.build_dependencies, + original_toml.build_dependencies2.as_ref(), + original_toml.build_dependencies.as_ref(), "build-dependencies", package_name, "package", @@ -455,8 +455,8 @@ fn normalize_toml( warnings, )?; deprecated_underscore( - &platform.dev_dependencies2, - &platform.dev_dependencies, + platform.dev_dependencies2.as_ref(), + platform.dev_dependencies.as_ref(), "dev-dependencies", name, "platform target", @@ -476,8 +476,8 @@ fn normalize_toml( warnings, )?; deprecated_underscore( - &platform.build_dependencies2, - &platform.build_dependencies, + platform.build_dependencies2.as_ref(), + platform.build_dependencies.as_ref(), "build-dependencies", name, "platform target", @@ -779,8 +779,8 @@ fn normalize_dependencies<'a>( )?; if let manifest::TomlDependency::Detailed(ref mut d) = resolved { deprecated_underscore( - &d.default_features2, - &d.default_features, + d.default_features2.as_ref(), + d.default_features.as_ref(), "default-features", name_in_toml, "dependency", @@ -1306,7 +1306,7 @@ pub fn to_real_manifest( &normalized_toml, package_root, edition, - &normalized_package.metabuild, + normalized_package.metabuild.as_ref(), warnings, )?; @@ -1454,19 +1454,19 @@ pub fn to_real_manifest( description: normalized_package .normalized_description() .expect("previously normalized") - .cloned(), + .map(str::to_string), homepage: normalized_package .normalized_homepage() .expect("previously normalized") - .cloned(), + .map(str::to_string), documentation: normalized_package .normalized_documentation() .expect("previously normalized") - .cloned(), + .map(str::to_string), readme: normalized_package .normalized_readme() .expect("previously normalized") - .cloned(), + .map(str::to_string), authors: normalized_package .normalized_authors() .expect("previously normalized") @@ -1475,15 +1475,15 @@ pub fn to_real_manifest( license: normalized_package .normalized_license() .expect("previously normalized") - .cloned(), + .map(str::to_string), license_file: normalized_package .normalized_license_file() .expect("previously normalized") - .cloned(), + .map(str::to_string), repository: normalized_package .normalized_repository() .expect("previously normalized") - .cloned(), + .map(str::to_string), keywords: normalized_package .normalized_keywords() .expect("previously normalized") @@ -2632,8 +2632,8 @@ fn emit_diagnostic( /// Warn about paths that have been deprecated and may conflict. 
fn deprecated_underscore( - old: &Option, - new: &Option, + old: Option<&T>, + new: Option<&T>, new_path: &str, name: &str, kind: &str, diff --git a/src/cargo/util/toml/targets.rs b/src/cargo/util/toml/targets.rs index ebade72da19..c94a67a1e58 100644 --- a/src/cargo/util/toml/targets.rs +++ b/src/cargo/util/toml/targets.rs @@ -38,7 +38,7 @@ pub(super) fn to_targets( normalized_toml: &TomlManifest, package_root: &Path, edition: Edition, - metabuild: &Option, + metabuild: Option<&StringOrVec>, warnings: &mut Vec, ) -> CargoResult> { let mut targets = Vec::new(); @@ -1127,8 +1127,8 @@ fn validate_proc_macro( warnings: &mut Vec, ) -> CargoResult<()> { deprecated_underscore( - &target.proc_macro2, - &target.proc_macro, + target.proc_macro2.as_ref(), + target.proc_macro.as_ref(), "proc-macro", name_or_panic(target), format!("{kind} target").as_str(), @@ -1166,8 +1166,8 @@ fn validate_crate_types( warnings: &mut Vec, ) -> CargoResult<()> { deprecated_underscore( - &target.crate_type2, - &target.crate_type, + target.crate_type2.as_ref(), + target.crate_type.as_ref(), "crate-type", name_or_panic(target), format!("{kind} target").as_str(), diff --git a/tests/testsuite/config.rs b/tests/testsuite/config.rs index 8f9092c2fb2..4a3fdbdb74c 100644 --- a/tests/testsuite/config.rs +++ b/tests/testsuite/config.rs @@ -117,7 +117,7 @@ impl GlobalContextBuilder { false, false, false, - &None, + None, &self.unstable, &self.config_args, )?;
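
The following standalone sketch is not part of the patch; it only illustrates the two
claims from the commit message above. The function names (`path_len_old`, `path_len`)
are invented for illustration, and the size comparison relies solely on Rust's
guaranteed null-pointer niche for `Option<&T>`.

```rust
use std::mem::size_of;

// Pre-patch style: the callee receives a pointer to an Option it must unwrap.
fn path_len_old(path: &Option<String>) -> usize {
    path.as_ref().map_or(0, |p| p.len())
}

// Post-patch style: the callee receives either null (None) or a pointer (Some).
fn path_len(path: Option<&str>) -> usize {
    path.map_or(0, str::len)
}

fn main() {
    // Niche optimization: `Option<&T>` is guaranteed to be a single pointer wide,
    // while `Option<u64>` needs extra space for the discriminant, so `&Option<u64>`
    // points at a strictly larger object.
    assert_eq!(size_of::<Option<&u64>>(), size_of::<&u64>());
    assert!(size_of::<Option<u64>>() > size_of::<u64>());

    // Encapsulation/ergonomics: callers no longer need to own an `Option<String>`.
    let owned: Option<String> = Some("src/main.rs".to_string());
    assert_eq!(path_len_old(&owned), 11); // caller must already hold an Option<String>
    assert_eq!(path_len(owned.as_deref()), 11);

    let plain = "build.rs".to_string(); // a bare String works too, no Option needed
    assert_eq!(path_len(Some(plain.as_str())), 8);
    assert_eq!(path_len(None), 0);
}
```

The size assertion uses `u64` because it has no niche of its own; for types that do
(such as `String`), `Option<T>` is already the same size as `T`, so the benefit there
is mainly the API shape rather than the extra indirection.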
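Also appended, and likewise not part of the patch: a minimal sketch of the mechanical
pattern the diff applies throughout, written against a made-up `PackageInfo` type.
Getters return `field.as_ref()`/`field.as_deref()` instead of `&self.field`, and
parameters take `Option<&T>` so call sites can pass `value.as_deref()` (compare
`Manifest::publish()` and the `session: Option<&str>` plumbing in the 1password
credential provider above).

```rust
use std::path::{Path, PathBuf};

// `PackageInfo` is a made-up stand-in; only the accessor/parameter pattern
// mirrors the patch.
struct PackageInfo {
    publish: Option<Vec<String>>,
    readme: Option<String>,
    build_script: Option<PathBuf>,
}

impl PackageInfo {
    // Before the patch: `fn publish(&self) -> &Option<Vec<String>> { &self.publish }`.
    // After: borrow the contents, not the wrapper.
    fn publish(&self) -> Option<&Vec<String>> {
        self.publish.as_ref()
    }

    // `as_deref` goes one step further and lends the unowned view (`&str`, `&Path`).
    fn readme(&self) -> Option<&str> {
        self.readme.as_deref()
    }

    fn build_script(&self) -> Option<&Path> {
        self.build_script.as_deref()
    }
}

// Parameters follow the same rule: accept `Option<&T>` rather than `&Option<T>`,
// so callers never have to construct (or give up ownership of) the Option.
fn report(script: Option<&Path>) {
    match script {
        Some(path) => println!("build script: {}", path.display()),
        None => println!("no build script"),
    }
}

fn main() {
    let info = PackageInfo {
        publish: Some(vec!["my-registry".to_string()]),
        readme: Some("README.md".to_string()),
        build_script: Some(PathBuf::from("build.rs")),
    };
    report(info.build_script());
    println!("publish registries: {:?}", info.publish());
    println!("readme: {:?}", info.readme());
}
```

Where an owned value is still required on the other side, the patch falls back to
`.cloned()` (as in `WorkspaceRootConfig::new`) rather than keeping the `&Option<T>`
signature.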