From f0f856502eb5b27e9c5766d86b0a95bfd1f98fc6 Mon Sep 17 00:00:00 2001 From: Eh2406 Date: Tue, 16 Apr 2019 16:11:28 -0400 Subject: [PATCH 1/9] remove the resolver warnings --- src/cargo/core/resolver/context.rs | 40 +++++++++++++++--------------- src/cargo/core/resolver/mod.rs | 19 ++++++++------ 2 files changed, 32 insertions(+), 27 deletions(-) diff --git a/src/cargo/core/resolver/context.rs b/src/cargo/core/resolver/context.rs index 8a704d864cc..18174777541 100644 --- a/src/cargo/core/resolver/context.rs +++ b/src/cargo/core/resolver/context.rs @@ -48,9 +48,6 @@ pub struct Context { // at the very end and actually construct the map that we're making. pub resolve_graph: RcList, pub resolve_replacements: RcList<(PackageId, PackageId)>, - - // These warnings are printed after resolution. - pub warnings: RcList, } /// When backtracking it can be useful to know how far back to go. @@ -109,7 +106,6 @@ impl Context { parents: Graph::new(), resolve_replacements: RcList::new(), activations: im_rc::HashMap::new(), - warnings: RcList::new(), } } @@ -174,7 +170,7 @@ impl Context { // First, figure out our set of dependencies based on the requested set // of features. This also calculates what features we're going to enable // for our own dependencies. - let deps = self.resolve_features(parent, candidate, method)?; + let deps = self.resolve_features(parent, candidate, method, None)?; // Next, transform all dependencies into a list of possible candidates // which can satisfy that dependency. @@ -229,11 +225,12 @@ impl Context { } /// Returns all dependencies and the features we want from them. - fn resolve_features<'b>( + pub fn resolve_features<'b>( &mut self, parent: Option<&Summary>, s: &'b Summary, method: &'b Method<'_>, + config: Option<&crate::util::config::Config>, ) -> ActivateResult)>> { let dev_deps = match *method { Method::Everything => true, @@ -261,20 +258,23 @@ impl Context { // name. let base = reqs.deps.get(&dep.name_in_toml()).unwrap_or(&default_dep); used_features.insert(dep.name_in_toml()); - let always_required = !dep.is_optional() - && !s - .dependencies() - .iter() - .any(|d| d.is_optional() && d.name_in_toml() == dep.name_in_toml()); - if always_required && base.0 { - self.warnings.push(format!( - "Package `{}` does not have feature `{}`. It has a required dependency \ - with that name, but only optional dependencies can be used as features. \ - This is currently a warning to ease the transition, but it will become an \ - error in the future.", - s.package_id(), - dep.name_in_toml() - )); + if let Some(config) = config { + let mut shell = config.shell(); + let always_required = !dep.is_optional() + && !s + .dependencies() + .iter() + .any(|d| d.is_optional() && d.name_in_toml() == dep.name_in_toml()); + if always_required && base.0 { + shell.warn(&format!( + "Package `{}` does not have feature `{}`. It has a required dependency \ + with that name, but only optional dependencies can be used as features. \ + This is currently a warning to ease the transition, but it will become an \ + error in the future.", + s.package_id(), + dep.name_in_toml() + ))? + } } let mut base = base.1.clone(); base.extend(dep.features().iter()); diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index 5c6e240328b..1fb10783cb8 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -158,13 +158,18 @@ pub fn resolve( trace!("resolved: {:?}", resolve); // If we have a shell, emit warnings about required deps used as feature. 
- if let Some(config) = config { - if print_warnings { - let mut shell = config.shell(); - let mut warnings = &cx.warnings; - while let Some(ref head) = warnings.head { - shell.warn(&head.0)?; - warnings = &head.1; + if print_warnings && config.is_some() { + let mut new_cx = cx.clone(); + for (j, _) in cx.activations.values() { + if let Some(features) = cx.resolve_features.get(&j.package_id()) { + let features: Vec<_> = features.iter().cloned().collect(); + let method = Method::Required { + dev_deps: false, + features: &features, + all_features: false, + uses_default_features: false, + }; + let _ = new_cx.resolve_features(None, j, &method, config); } } } From 4590e74f11c11aa5db0d758974fbddcb4afc7442 Mon Sep 17 00:00:00 2001 From: Eh2406 Date: Wed, 17 Apr 2019 14:20:36 -0400 Subject: [PATCH 2/9] calculate features the hard way --- src/cargo/core/resolver/errors.rs | 1 + src/cargo/core/resolver/mod.rs | 46 ++++++++++++++++++++++------- tests/testsuite/support/resolver.rs | 2 +- 3 files changed, 38 insertions(+), 11 deletions(-) diff --git a/src/cargo/core/resolver/errors.rs b/src/cargo/core/resolver/errors.rs index 071423690de..f376734ae9f 100644 --- a/src/cargo/core/resolver/errors.rs +++ b/src/cargo/core/resolver/errors.rs @@ -50,6 +50,7 @@ impl fmt::Display for ResolveError { pub type ActivateResult = Result; +#[derive(Debug)] pub enum ActivateError { Fatal(failure::Error), Conflict(PackageId, ConflictReason), diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index 1fb10783cb8..db1dd283432 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -160,18 +160,44 @@ pub fn resolve( // If we have a shell, emit warnings about required deps used as feature. if print_warnings && config.is_some() { let mut new_cx = cx.clone(); - for (j, _) in cx.activations.values() { - if let Some(features) = cx.resolve_features.get(&j.package_id()) { - let features: Vec<_> = features.iter().cloned().collect(); - let method = Method::Required { - dev_deps: false, - features: &features, - all_features: false, - uses_default_features: false, - }; - let _ = new_cx.resolve_features(None, j, &method, config); + new_cx.resolve_features = im_rc::HashMap::new(); + let mut features_from_dep = HashMap::new(); + for (summery, method) in summaries { + for (dep, features) in new_cx + .resolve_features(None, summery, &method, config) + .expect("can not resolve_features for a required summery") + { + features_from_dep.insert((summery.package_id(), dep), features); + } + } + for summery in resolve.sort().iter().rev().map(|id| { + cx.activations + .get(&id.as_activations_key()) + .expect("id in dependency graph but not in activations") + .0 + .clone() + }) { + for (parent, deps) in cx.parents.edges(&summery.package_id()) { + for dep in deps.iter() { + let features = features_from_dep + .remove(&(*parent, dep.clone())) + .expect("fulfilled a dep that was not needed"); + let method = Method::Required { + dev_deps: false, + features: &features, + all_features: false, + uses_default_features: dep.uses_default_features(), + }; + for (dep, features) in new_cx + .resolve_features(None, &summery, &method, config) + .expect("can not resolve_features for a used dep") + { + features_from_dep.insert((summery.package_id(), dep), features); + } + } } } + assert_eq!(cx.resolve_features, new_cx.resolve_features); } Ok(resolve) diff --git a/tests/testsuite/support/resolver.rs b/tests/testsuite/support/resolver.rs index bce8c317e81..d2e7ec8f184 100644 --- a/tests/testsuite/support/resolver.rs 
+++ b/tests/testsuite/support/resolver.rs @@ -114,7 +114,7 @@ pub fn resolve_with_config_raw( &mut registry, &HashSet::new(), config, - false, + true, true, ); From 0131d09dd46d6ad4606384b0dbc4b9ef8170c852 Mon Sep 17 00:00:00 2001 From: Eh2406 Date: Wed, 17 Apr 2019 17:16:02 -0400 Subject: [PATCH 3/9] move to a function --- src/cargo/core/resolver/mod.rs | 88 +++++++++++++++++++--------------- 1 file changed, 49 insertions(+), 39 deletions(-) diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index db1dd283432..0a151a3742b 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -159,45 +159,7 @@ pub fn resolve( // If we have a shell, emit warnings about required deps used as feature. if print_warnings && config.is_some() { - let mut new_cx = cx.clone(); - new_cx.resolve_features = im_rc::HashMap::new(); - let mut features_from_dep = HashMap::new(); - for (summery, method) in summaries { - for (dep, features) in new_cx - .resolve_features(None, summery, &method, config) - .expect("can not resolve_features for a required summery") - { - features_from_dep.insert((summery.package_id(), dep), features); - } - } - for summery in resolve.sort().iter().rev().map(|id| { - cx.activations - .get(&id.as_activations_key()) - .expect("id in dependency graph but not in activations") - .0 - .clone() - }) { - for (parent, deps) in cx.parents.edges(&summery.package_id()) { - for dep in deps.iter() { - let features = features_from_dep - .remove(&(*parent, dep.clone())) - .expect("fulfilled a dep that was not needed"); - let method = Method::Required { - dev_deps: false, - features: &features, - all_features: false, - uses_default_features: dep.uses_default_features(), - }; - for (dep, features) in new_cx - .resolve_features(None, &summery, &method, config) - .expect("can not resolve_features for a used dep") - { - features_from_dep.insert((summery.package_id(), dep), features); - } - } - } - } - assert_eq!(cx.resolve_features, new_cx.resolve_features); + emit_warnings(&cx, &resolve, summaries, config) } Ok(resolve) @@ -1129,3 +1091,51 @@ fn check_duplicate_pkgs_in_lockfile(resolve: &Resolve) -> CargoResult<()> { } Ok(()) } + +/// re-run all used resolve_features so it can print warnings +fn emit_warnings( + cx: &Context, + resolve: &Resolve, + summaries: &[(Summary, Method<'_>)], + config: Option<&Config>, +) { + let mut new_cx = cx.clone(); + new_cx.resolve_features = im_rc::HashMap::new(); + let mut features_from_dep = HashMap::new(); + for (summery, method) in summaries { + for (dep, features) in new_cx + .resolve_features(None, summery, &method, config) + .expect("can not resolve_features for a required summery") + { + features_from_dep.insert((summery.package_id(), dep), features); + } + } + for summery in resolve.sort().iter().rev().map(|id| { + cx.activations + .get(&id.as_activations_key()) + .expect("id in dependency graph but not in activations") + .0 + .clone() + }) { + for (parent, deps) in cx.parents.edges(&summery.package_id()) { + for dep in deps.iter() { + let features = features_from_dep + .remove(&(*parent, dep.clone())) + .expect("fulfilled a dep that was not needed"); + let method = Method::Required { + dev_deps: false, + features: &features, + all_features: false, + uses_default_features: dep.uses_default_features(), + }; + for (dep, features) in new_cx + .resolve_features(None, &summery, &method, config) + .expect("can not resolve_features for a used dep") + { + features_from_dep.insert((summery.package_id(), dep), features); + } + } 
+ } + } + assert_eq!(cx.resolve_features, new_cx.resolve_features); +} From df62a578bac4cbf7b536e6673f4496709b69bc08 Mon Sep 17 00:00:00 2001 From: Eh2406 Date: Wed, 17 Apr 2019 17:22:51 -0400 Subject: [PATCH 4/9] list used used_replacements after we figure out what is used --- src/cargo/core/resolver/context.rs | 19 ++++++++----------- src/cargo/core/resolver/mod.rs | 4 +--- src/cargo/core/resolver/types.rs | 14 ++++++++++++-- 3 files changed, 21 insertions(+), 16 deletions(-) diff --git a/src/cargo/core/resolver/context.rs b/src/cargo/core/resolver/context.rs index 18174777541..4177a1c6b53 100644 --- a/src/cargo/core/resolver/context.rs +++ b/src/cargo/core/resolver/context.rs @@ -47,7 +47,6 @@ pub struct Context { // hash maps but are built up as "construction lists". We'll iterate these // at the very end and actually construct the map that we're making. pub resolve_graph: RcList<GraphNode>, - pub resolve_replacements: RcList<(PackageId, PackageId)>, } /// When backtracking it can be useful to know how far back to go. @@ -104,7 +103,6 @@ impl Context { None }, parents: Graph::new(), - resolve_replacements: RcList::new(), activations: im_rc::HashMap::new(), } } @@ -331,15 +329,14 @@ impl Context { Ok(ret) } - pub fn resolve_replacements(&self) -> HashMap<PackageId, PackageId> { - let mut replacements = HashMap::new(); - let mut cur = &self.resolve_replacements; - while let Some(ref node) = cur.head { - let (k, v) = node.0; - replacements.insert(k, v); - cur = &node.1; - } - replacements + pub fn resolve_replacements( + &self, + registry: &RegistryQueryer<'_>, + ) -> HashMap<PackageId, PackageId> { + self.activations + .values() + .filter_map(|(s, _)| registry.used_replacement_for(s.package_id())) + .collect() } pub fn graph(&self) -> Graph<PackageId, Vec<Dependency>> { diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index 0a151a3742b..66ef578a724 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -143,7 +143,7 @@ pub fn resolve( } let resolve = Resolve::new( cx.graph(), - cx.resolve_replacements(), + cx.resolve_replacements(&registry), cx.resolve_features .iter() .map(|(k, v)| (*k, v.iter().map(|x| x.to_string()).collect())) @@ -668,8 +668,6 @@ fn activate( let candidate = match candidate.replace { Some(replace) => { - cx.resolve_replacements - .push((candidate_pid, replace.package_id())); if cx.flag_activated(&replace, method)? && activated { return Ok(None); } diff --git a/src/cargo/core/resolver/types.rs b/src/cargo/core/resolver/types.rs index b399a51c695..268eb447de7 100644 --- a/src/cargo/core/resolver/types.rs +++ b/src/cargo/core/resolver/types.rs @@ -95,11 +95,12 @@ pub struct RegistryQueryer<'a> { pub registry: &'a mut (dyn Registry + 'a), replacements: &'a [(PackageIdSpec, Dependency)], try_to_use: &'a HashSet<PackageId>, - cache: HashMap<Dependency, Rc<Vec<Candidate>>>, // If set the list of dependency candidates will be sorted by minimal // versions first. That allows `cargo update -Z minimal-versions` which will // specify minimum dependency versions to be used. minimal_versions: bool, + cache: HashMap<Dependency, Rc<Vec<Candidate>>>, + used_replacements: HashMap<PackageId, PackageId>, } impl<'a> RegistryQueryer<'a> { @@ -112,12 +113,17 @@ impl<'a> RegistryQueryer<'a> { RegistryQueryer { registry, replacements, - cache: HashMap::new(), try_to_use, minimal_versions, + cache: HashMap::new(), + used_replacements: HashMap::new(), } } + pub fn used_replacement_for(&self, p: PackageId) -> Option<(PackageId, PackageId)> { + self.used_replacements.get(&p).map(|&r| (p, r)) + } + /// Queries the `registry` to return a list of candidates for `dep`.
/// /// This method is the location where overrides are taken into account. If @@ -212,6 +218,10 @@ impl<'a> RegistryQueryer<'a> { for dep in summary.dependencies() { debug!("\t{} => {}", dep.package_name(), dep.version_req()); } + if let Some(r) = &replace { + self.used_replacements + .insert(summary.package_id(), r.package_id()); + } candidate.replace = replace; } From 097dbdf85da1742ae81703fb51fe220463131ca7 Mon Sep 17 00:00:00 2001 From: Eh2406 Date: Wed, 17 Apr 2019 17:26:40 -0400 Subject: [PATCH 5/9] invert parents to get graph --- src/cargo/core/resolver/context.rs | 30 +++++++++--------------------- src/cargo/core/resolver/mod.rs | 2 -- 2 files changed, 9 insertions(+), 23 deletions(-) diff --git a/src/cargo/core/resolver/context.rs b/src/cargo/core/resolver/context.rs index 4177a1c6b53..94d88b48539 100644 --- a/src/cargo/core/resolver/context.rs +++ b/src/cargo/core/resolver/context.rs @@ -13,9 +13,7 @@ use crate::util::CargoResult; use crate::util::Graph; use super::errors::ActivateResult; -use super::types::{ - ConflictMap, ConflictReason, DepInfo, GraphNode, Method, RcList, RegistryQueryer, -}; +use super::types::{ConflictMap, ConflictReason, DepInfo, GraphNode, Method, RegistryQueryer}; pub use super::encode::{EncodableDependency, EncodablePackageId, EncodableResolve}; pub use super::encode::{Metadata, WorkspaceResolve}; @@ -37,16 +35,9 @@ pub struct Context { pub public_dependency: Option>>, - // This is somewhat redundant with the `resolve_graph` that stores the same data, - // but for querying in the opposite order. /// a way to look up for a package in activations what packages required it /// and all of the exact deps that it fulfilled. pub parents: Graph>>, - - // These are two cheaply-cloneable lists (O(1) clone) which are effectively - // hash maps but are built up as "construction lists". We'll iterate these - // at the very end and actually construct the map that we're making. - pub resolve_graph: RcList, } /// When backtracking it can be useful to know how far back to go. 
@@ -94,7 +85,6 @@ impl PackageId { impl Context { pub fn new(check_public_visible_dependencies: bool) -> Context { Context { - resolve_graph: RcList::new(), resolve_features: im_rc::HashMap::new(), links: im_rc::HashMap::new(), public_dependency: if check_public_visible_dependencies { @@ -122,7 +112,6 @@ impl Context { ); } im_rc::hashmap::Entry::Vacant(v) => { - self.resolve_graph.push(GraphNode::Add(id)); if let Some(link) = summary.links() { ensure!( self.links.insert(link, id).is_none(), @@ -340,16 +329,15 @@ impl Context { } pub fn graph(&self) -> Graph> { - let mut graph: Graph> = Graph::new(); - let mut cur = &self.resolve_graph; - while let Some(ref node) = cur.head { - match node.0 { - GraphNode::Add(ref p) => graph.add(p.clone()), - GraphNode::Link(ref a, ref b, ref dep) => { - graph.link(a.clone(), b.clone()).push(dep.clone()); - } + let mut graph = Graph::new(); + self.activations + .values() + .for_each(|(r, _)| graph.add(r.package_id())); + for i in self.parents.iter() { + graph.add(*i); + for (o, e) in self.parents.edges(i) { + *graph.link(*o, *i) = e.to_vec(); } - cur = &node.1; } graph } diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index 66ef578a724..ff6fa4d7d87 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -606,8 +606,6 @@ fn activate( let candidate_pid = candidate.summary.package_id(); if let Some((parent, dep)) = parent { let parent_pid = parent.package_id(); - cx.resolve_graph - .push(GraphNode::Link(parent_pid, candidate_pid, dep.clone())); Rc::make_mut( // add a edge from candidate to parent in the parents graph cx.parents.link(candidate_pid, parent_pid), From 8cd9b0c7fab941997d5c568d10fb2635111cb33b Mon Sep 17 00:00:00 2001 From: Eh2406 Date: Wed, 17 Apr 2019 17:39:40 -0400 Subject: [PATCH 6/9] remove dead code --- src/cargo/core/resolver/context.rs | 2 +- src/cargo/core/resolver/mod.rs | 2 +- src/cargo/core/resolver/types.rs | 47 ------------------------------ 3 files changed, 2 insertions(+), 49 deletions(-) diff --git a/src/cargo/core/resolver/context.rs b/src/cargo/core/resolver/context.rs index 94d88b48539..8ff76017fe0 100644 --- a/src/cargo/core/resolver/context.rs +++ b/src/cargo/core/resolver/context.rs @@ -13,7 +13,7 @@ use crate::util::CargoResult; use crate::util::Graph; use super::errors::ActivateResult; -use super::types::{ConflictMap, ConflictReason, DepInfo, GraphNode, Method, RegistryQueryer}; +use super::types::{ConflictMap, ConflictReason, DepInfo, Method, RegistryQueryer}; pub use super::encode::{EncodableDependency, EncodablePackageId, EncodableResolve}; pub use super::encode::{Metadata, WorkspaceResolve}; diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index ff6fa4d7d87..c58b30b4da4 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -62,7 +62,7 @@ use crate::util::errors::CargoResult; use crate::util::profile; use self::context::{Activations, Context}; -use self::types::{Candidate, ConflictMap, ConflictReason, DepsFrame, GraphNode}; +use self::types::{Candidate, ConflictMap, ConflictReason, DepsFrame}; use self::types::{RcVecIter, RegistryQueryer, RemainingDeps, ResolverProgress}; pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve}; diff --git a/src/cargo/core/resolver/types.rs b/src/cargo/core/resolver/types.rs index 268eb447de7..70b46442d75 100644 --- a/src/cargo/core/resolver/types.rs +++ b/src/cargo/core/resolver/types.rs @@ -491,50 +491,3 @@ where } impl ExactSizeIterator for 
RcVecIter<T> {} - -pub struct RcList<T> { - pub head: Option<Rc<(T, RcList<T>)>>, -} - -impl<T> RcList<T> { - pub fn new() -> RcList<T> { - RcList { head: None } - } - - pub fn push(&mut self, data: T) { - let node = Rc::new(( - data, - RcList { - head: self.head.take(), - }, - )); - self.head = Some(node); - } -} - -// Not derived to avoid `T: Clone` -impl<T> Clone for RcList<T> { - fn clone(&self) -> RcList<T> { - RcList { - head: self.head.clone(), - } - } -} - -// Avoid stack overflows on drop by turning recursion into a loop -impl<T> Drop for RcList<T> { - fn drop(&mut self) { - let mut cur = self.head.take(); - while let Some(head) = cur { - match Rc::try_unwrap(head) { - Ok((_data, mut next)) => cur = next.head.take(), - Err(_) => break, - } - } - } -} - -pub enum GraphNode { - Add(PackageId), - Link(PackageId, PackageId, Dependency), -} From d0c80ecb4b1667ffb27b8bfe8230f7effc9478bb Mon Sep 17 00:00:00 2001 From: Eh2406 Date: Wed, 17 Apr 2019 17:39:40 -0400 Subject: [PATCH 7/9] address nits --- src/cargo/core/resolver/context.rs | 9 ++++++--- src/cargo/core/resolver/mod.rs | 26 +++++++++++++++++--------- src/cargo/core/resolver/types.rs | 17 +++++++++++++++-- tests/testsuite/resolve.rs | 10 +++++----- 4 files changed, 43 insertions(+), 19 deletions(-) diff --git a/src/cargo/core/resolver/context.rs b/src/cargo/core/resolver/context.rs index 8ff76017fe0..9eaf75c7867 100644 --- a/src/cargo/core/resolver/context.rs +++ b/src/cargo/core/resolver/context.rs @@ -9,6 +9,7 @@ use log::debug; use crate::core::interning::InternedString; use crate::core::{Dependency, FeatureValue, PackageId, SourceId, Summary}; +use crate::util::config::Config; use crate::util::CargoResult; use crate::util::Graph; @@ -217,7 +218,7 @@ impl Context { parent: Option<&Summary>, s: &'b Summary, method: &'b Method<'_>, - config: Option<&crate::util::config::Config>, + config: Option<&Config>, ) -> ActivateResult<Vec<(Dependency, Vec<InternedString>)>> { let dev_deps = match *method { Method::Everything => true, @@ -329,14 +330,16 @@ impl Context { } pub fn graph(&self) -> Graph<PackageId, Vec<Dependency>> { - let mut graph = Graph::new(); + let mut graph: Graph<PackageId, Vec<Dependency>> = Graph::new(); self.activations .values() .for_each(|(r, _)| graph.add(r.package_id())); for i in self.parents.iter() { graph.add(*i); for (o, e) in self.parents.edges(i) { - *graph.link(*o, *i) = e.to_vec(); + let old_link = graph.link(*o, *i); + debug_assert!(old_link.is_empty()); + *old_link = e.to_vec(); } } graph diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index c58b30b4da4..91fff9f9c2b 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -1088,7 +1088,11 @@ fn check_duplicate_pkgs_in_lockfile(resolve: &Resolve) -> CargoResult<()> { Ok(()) } -/// re-run all used resolve_features so it can print warnings +/// Re-run all used `resolve_features` so it can print warnings. +/// Within the resolution loop we do not pass a `config` to `resolve_features` +/// this tells it not to print warnings. Hear we do pass `config` triggering it to print them. +/// This is done as the resolution is NP-Hard so the loop can potentially call `resolve features` +/// an exponential number of times, but this pass is linear in the number of dependencies.
fn emit_warnings( cx: &Context, resolve: &Resolve, @@ -1098,22 +1102,26 @@ fn emit_warnings( let mut new_cx = cx.clone(); new_cx.resolve_features = im_rc::HashMap::new(); let mut features_from_dep = HashMap::new(); - for (summery, method) in summaries { + for (summary, method) in summaries { for (dep, features) in new_cx - .resolve_features(None, summery, &method, config) - .expect("can not resolve_features for a required summery") + .resolve_features(None, summary, &method, config) + .expect("can not resolve_features for a required summary") { - features_from_dep.insert((summery.package_id(), dep), features); + features_from_dep.insert((summary.package_id(), dep), features); } } - for summery in resolve.sort().iter().rev().map(|id| { + // crates enable features for their dependencies. + // So by iterating reverse topologically we process all of the parents before each child. + // Then we know all the needed features for each crate. + let topological_sort = resolve.sort(); + for summary in topological_sort.iter().rev().map(|id| { cx.activations .get(&id.as_activations_key()) .expect("id in dependency graph but not in activations") .0 .clone() }) { - for (parent, deps) in cx.parents.edges(&summery.package_id()) { + for (parent, deps) in cx.parents.edges(&summary.package_id()) { for dep in deps.iter() { let features = features_from_dep .remove(&(*parent, dep.clone())) @@ -1125,10 +1133,10 @@ fn emit_warnings( uses_default_features: dep.uses_default_features(), }; for (dep, features) in new_cx - .resolve_features(None, &summery, &method, config) + .resolve_features(None, &summary, &method, config) .expect("can not resolve_features for a used dep") { - features_from_dep.insert((summery.package_id(), dep), features); + features_from_dep.insert((summary.package_id(), dep), features); } } } diff --git a/src/cargo/core/resolver/types.rs b/src/cargo/core/resolver/types.rs index 70b46442d75..098f4f126d4 100644 --- a/src/cargo/core/resolver/types.rs +++ b/src/cargo/core/resolver/types.rs @@ -219,8 +219,21 @@ impl<'a> RegistryQueryer<'a> { debug!("\t{} => {}", dep.package_name(), dep.version_req()); } if let Some(r) = &replace { - self.used_replacements - .insert(summary.package_id(), r.package_id()); + if let Some(old) = self + .used_replacements + .insert(summary.package_id(), r.package_id()) + { + debug_assert_eq!( + r.package_id(), + old, + "we are inconsistent about witch replacement is used for {:?}, \ + one time we used {:?}, \ + now we are adding {:?}.", + summary.package_id(), + old, + r.package_id() + ) + } } candidate.replace = replace; diff --git a/tests/testsuite/resolve.rs b/tests/testsuite/resolve.rs index a1b3277748d..780da0fbec1 100644 --- a/tests/testsuite/resolve.rs +++ b/tests/testsuite/resolve.rs @@ -111,13 +111,13 @@ proptest! 
{ ) { let reg = registry(input.clone()); let mut removed_input = input.clone(); - for (summery_idx, dep_idx) in indexes_to_remove { + for (summary_idx, dep_idx) in indexes_to_remove { if !removed_input.is_empty() { - let summery_idx = summery_idx.index(removed_input.len()); - let deps = removed_input[summery_idx].dependencies(); + let summary_idx = summary_idx.index(removed_input.len()); + let deps = removed_input[summary_idx].dependencies(); if !deps.is_empty() { - let new = remove_dep(&removed_input[summery_idx], dep_idx.index(deps.len())); - removed_input[summery_idx] = new; + let new = remove_dep(&removed_input[summary_idx], dep_idx.index(deps.len())); + removed_input[summary_idx] = new; } } } From a4737160bf40d7c52fc96c4d2a986b77351521c1 Mon Sep 17 00:00:00 2001 From: Eh2406 Date: Fri, 19 Apr 2019 14:23:20 -0400 Subject: [PATCH 8/9] fix replacements --- src/cargo/core/resolver/context.rs | 2 +- src/cargo/core/resolver/mod.rs | 28 +++++++++++++++++----------- 2 files changed, 18 insertions(+), 12 deletions(-) diff --git a/src/cargo/core/resolver/context.rs b/src/cargo/core/resolver/context.rs index 9eaf75c7867..213aa1186e2 100644 --- a/src/cargo/core/resolver/context.rs +++ b/src/cargo/core/resolver/context.rs @@ -338,7 +338,7 @@ impl Context { graph.add(*i); for (o, e) in self.parents.edges(i) { let old_link = graph.link(*o, *i); - debug_assert!(old_link.is_empty()); + assert!(old_link.is_empty()); *old_link = e.to_vec(); } } diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index 91fff9f9c2b..5808fce3282 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -1103,24 +1103,30 @@ fn emit_warnings( new_cx.resolve_features = im_rc::HashMap::new(); let mut features_from_dep = HashMap::new(); for (summary, method) in summaries { + let replacement = &cx.activations[&resolve + .replacements() + .get(&summary.package_id()) + .unwrap_or(&summary.package_id()) + .as_activations_key()] + .0; for (dep, features) in new_cx - .resolve_features(None, summary, &method, config) - .expect("can not resolve_features for a required summary") + .resolve_features(None, replacement, &method, config) + .expect("can not resolve_features for a required summery") { - features_from_dep.insert((summary.package_id(), dep), features); + features_from_dep.insert((replacement.package_id(), dep), features); } } // crates enable features for their dependencies. // So by iterating reverse topologically we process all of the parents before each child. // Then we know all the needed features for each crate. 
let topological_sort = resolve.sort(); - for summary in topological_sort.iter().rev().map(|id| { - cx.activations - .get(&id.as_activations_key()) - .expect("id in dependency graph but not in activations") - .0 - .clone() - }) { + for id in topological_sort + .iter() + .rev() + .filter(|&id| !resolve.replacements().contains_key(id)) + { + let summary = &cx.activations[&id.as_activations_key()].0; + for (parent, deps) in cx.parents.edges(&summary.package_id()) { for dep in deps.iter() { let features = features_from_dep @@ -1133,7 +1139,7 @@ fn emit_warnings( uses_default_features: dep.uses_default_features(), }; for (dep, features) in new_cx - .resolve_features(None, &summary, &method, config) + .resolve_features(None, summary, &method, config) .expect("can not resolve_features for a used dep") { features_from_dep.insert((summary.package_id(), dep), features); From 35ff555b7073318f721680775ebfe8170f081eba Mon Sep 17 00:00:00 2001 From: Eh2406 Date: Mon, 22 Apr 2019 17:31:55 -0400 Subject: [PATCH 9/9] just give up and make it an error --- src/cargo/core/resolver/context.rs | 35 ++++++------ src/cargo/core/resolver/errors.rs | 25 ++++++++- src/cargo/core/resolver/mod.rs | 68 ------------------------ src/cargo/core/resolver/types.rs | 13 +++++ src/cargo/ops/cargo_generate_lockfile.rs | 13 +---- src/cargo/ops/resolve.rs | 9 +--- tests/testsuite/features.rs | 31 ++++++----- tests/testsuite/support/resolver.rs | 1 - 8 files changed, 76 insertions(+), 119 deletions(-) diff --git a/src/cargo/core/resolver/context.rs b/src/cargo/core/resolver/context.rs index 213aa1186e2..c494a073678 100644 --- a/src/cargo/core/resolver/context.rs +++ b/src/cargo/core/resolver/context.rs @@ -9,7 +9,6 @@ use log::debug; use crate::core::interning::InternedString; use crate::core::{Dependency, FeatureValue, PackageId, SourceId, Summary}; -use crate::util::config::Config; use crate::util::CargoResult; use crate::util::Graph; @@ -158,7 +157,7 @@ impl Context { // First, figure out our set of dependencies based on the requested set // of features. This also calculates what features we're going to enable // for our own dependencies. - let deps = self.resolve_features(parent, candidate, method, None)?; + let deps = self.resolve_features(parent, candidate, method)?; // Next, transform all dependencies into a list of possible candidates // which can satisfy that dependency. @@ -218,7 +217,6 @@ impl Context { parent: Option<&Summary>, s: &'b Summary, method: &'b Method<'_>, - config: Option<&Config>, ) -> ActivateResult)>> { let dev_deps = match *method { Method::Everything => true, @@ -246,23 +244,26 @@ impl Context { // name. let base = reqs.deps.get(&dep.name_in_toml()).unwrap_or(&default_dep); used_features.insert(dep.name_in_toml()); - if let Some(config) = config { - let mut shell = config.shell(); - let always_required = !dep.is_optional() - && !s - .dependencies() - .iter() - .any(|d| d.is_optional() && d.name_in_toml() == dep.name_in_toml()); - if always_required && base.0 { - shell.warn(&format!( + let always_required = !dep.is_optional() + && !s + .dependencies() + .iter() + .any(|d| d.is_optional() && d.name_in_toml() == dep.name_in_toml()); + if always_required && base.0 { + return Err(match parent { + None => failure::format_err!( "Package `{}` does not have feature `{}`. It has a required dependency \ - with that name, but only optional dependencies can be used as features. 
\ - This is currently a warning to ease the transition, but it will become an \ - error in the future.", + with that name, but only optional dependencies can be used as features.", s.package_id(), dep.name_in_toml() - ))? - } + ) + .into(), + Some(p) => ( + p.package_id(), + ConflictReason::RequiredDependencyAsFeatures(dep.name_in_toml()), + ) + .into(), + }); } let mut base = base.1.clone(); base.extend(dep.features().iter()); diff --git a/src/cargo/core/resolver/errors.rs b/src/cargo/core/resolver/errors.rs index f376734ae9f..ef162a0ebc9 100644 --- a/src/cargo/core/resolver/errors.rs +++ b/src/cargo/core/resolver/errors.rs @@ -127,7 +127,7 @@ pub(super) fn activation_error( msg.push_str(&describe_path(&cx.parents.path_to_bottom(p))); } - let (features_errors, other_errors): (Vec<_>, Vec<_>) = other_errors + let (features_errors, mut other_errors): (Vec<_>, Vec<_>) = other_errors .drain(..) .partition(|&(_, r)| r.is_missing_features()); @@ -146,6 +146,29 @@ pub(super) fn activation_error( // p == parent so the full path is redundant. } + let (required_dependency_as_features_errors, other_errors): (Vec<_>, Vec<_>) = other_errors + .drain(..) + .partition(|&(_, r)| r.is_required_dependency_as_features()); + + for &(p, r) in required_dependency_as_features_errors.iter() { + if let ConflictReason::RequiredDependencyAsFeatures(ref features) = *r { + msg.push_str("\n\nthe package `"); + msg.push_str(&*p.name()); + msg.push_str("` depends on `"); + msg.push_str(&*dep.package_name()); + msg.push_str("`, with features: `"); + msg.push_str(features); + msg.push_str("` but `"); + msg.push_str(&*dep.package_name()); + msg.push_str("` does not have these features.\n"); + msg.push_str( + " It has a required dependency with that name, \ + but only optional dependencies can be used as features.\n", + ); + } + // p == parent so the full path is redundant. + } + if !other_errors.is_empty() { msg.push_str( "\n\nall possible versions conflict with \ diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index 5808fce3282..d186883a658 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -124,7 +124,6 @@ pub fn resolve( registry: &mut dyn Registry, try_to_use: &HashSet, config: Option<&Config>, - print_warnings: bool, check_public_visible_dependencies: bool, ) -> CargoResult { let cx = Context::new(check_public_visible_dependencies); @@ -157,11 +156,6 @@ pub fn resolve( check_duplicate_pkgs_in_lockfile(&resolve)?; trace!("resolved: {:?}", resolve); - // If we have a shell, emit warnings about required deps used as feature. - if print_warnings && config.is_some() { - emit_warnings(&cx, &resolve, summaries, config) - } - Ok(resolve) } @@ -1087,65 +1081,3 @@ fn check_duplicate_pkgs_in_lockfile(resolve: &Resolve) -> CargoResult<()> { } Ok(()) } - -/// Re-run all used `resolve_features` so it can print warnings. -/// Within the resolution loop we do not pass a `config` to `resolve_features` -/// this tells it not to print warnings. Hear we do pass `config` triggering it to print them. -/// This is done as the resolution is NP-Hard so the loop can potentially call `resolve features` -/// an exponential number of times, but this pass is linear in the number of dependencies. 
-fn emit_warnings( - cx: &Context, - resolve: &Resolve, - summaries: &[(Summary, Method<'_>)], - config: Option<&Config>, -) { - let mut new_cx = cx.clone(); - new_cx.resolve_features = im_rc::HashMap::new(); - let mut features_from_dep = HashMap::new(); - for (summary, method) in summaries { - let replacement = &cx.activations[&resolve - .replacements() - .get(&summary.package_id()) - .unwrap_or(&summary.package_id()) - .as_activations_key()] - .0; - for (dep, features) in new_cx - .resolve_features(None, replacement, &method, config) - .expect("can not resolve_features for a required summery") - { - features_from_dep.insert((replacement.package_id(), dep), features); - } - } - // crates enable features for their dependencies. - // So by iterating reverse topologically we process all of the parents before each child. - // Then we know all the needed features for each crate. - let topological_sort = resolve.sort(); - for id in topological_sort - .iter() - .rev() - .filter(|&id| !resolve.replacements().contains_key(id)) - { - let summary = &cx.activations[&id.as_activations_key()].0; - - for (parent, deps) in cx.parents.edges(&summary.package_id()) { - for dep in deps.iter() { - let features = features_from_dep - .remove(&(*parent, dep.clone())) - .expect("fulfilled a dep that was not needed"); - let method = Method::Required { - dev_deps: false, - features: &features, - all_features: false, - uses_default_features: dep.uses_default_features(), - }; - for (dep, features) in new_cx - .resolve_features(None, summary, &method, config) - .expect("can not resolve_features for a used dep") - { - features_from_dep.insert((summary.package_id(), dep), features); - } - } - } - } - assert_eq!(cx.resolve_features, new_cx.resolve_features); -} diff --git a/src/cargo/core/resolver/types.rs b/src/cargo/core/resolver/types.rs index 098f4f126d4..dc12cbc0566 100644 --- a/src/cargo/core/resolver/types.rs +++ b/src/cargo/core/resolver/types.rs @@ -426,6 +426,12 @@ pub enum ConflictReason { /// candidate we're activating didn't actually have the feature `foo`. MissingFeatures(String), + /// A dependency listed features that ended up being a required dependency. + /// For example we tried to activate feature `foo` but the + /// candidate we're activating didn't actually have the feature `foo` + /// it had a dependency `foo` instead. + RequiredDependencyAsFeatures(InternedString), + // TODO: needs more info for `activation_error` // TODO: needs more info for `find_candidate` /// pub dep error @@ -446,6 +452,13 @@ impl ConflictReason { } false } + + pub fn is_required_dependency_as_features(&self) -> bool { + if let ConflictReason::RequiredDependencyAsFeatures(_) = *self { + return true; + } + false + } } /// A list of packages that have gotten in the way of resolving a dependency. 
diff --git a/src/cargo/ops/cargo_generate_lockfile.rs b/src/cargo/ops/cargo_generate_lockfile.rs index c17aa4a107d..d99c29e122b 100644 --- a/src/cargo/ops/cargo_generate_lockfile.rs +++ b/src/cargo/ops/cargo_generate_lockfile.rs @@ -21,16 +21,8 @@ pub struct UpdateOptions<'a> { pub fn generate_lockfile(ws: &Workspace<'_>) -> CargoResult<()> { let mut registry = PackageRegistry::new(ws.config())?; - let resolve = ops::resolve_with_previous( - &mut registry, - ws, - Method::Everything, - None, - None, - &[], - true, - true, - )?; + let resolve = + ops::resolve_with_previous(&mut registry, ws, Method::Everything, None, None, &[], true)?; ops::write_pkg_lockfile(ws, &resolve)?; Ok(()) } @@ -92,7 +84,6 @@ pub fn update_lockfile(ws: &Workspace<'_>, opts: &UpdateOptions<'_>) -> CargoRes Some(&to_avoid), &[], true, - true, )?; // Summarize what is changing for the user. diff --git a/src/cargo/ops/resolve.rs b/src/cargo/ops/resolve.rs index 352a9ad824f..c002db21ff9 100644 --- a/src/cargo/ops/resolve.rs +++ b/src/cargo/ops/resolve.rs @@ -23,7 +23,7 @@ version. This may also occur with an optional dependency that is not enabled."; /// lock file. pub fn resolve_ws<'a>(ws: &Workspace<'a>) -> CargoResult<(PackageSet<'a>, Resolve)> { let mut registry = PackageRegistry::new(ws.config())?; - let resolve = resolve_with_registry(ws, &mut registry, true)?; + let resolve = resolve_with_registry(ws, &mut registry)?; let packages = get_resolved_packages(&resolve, registry)?; Ok((packages, resolve)) } @@ -64,7 +64,7 @@ pub fn resolve_ws_with_method<'a>( } else if ws.require_optional_deps() { // First, resolve the root_package's *listed* dependencies, as well as // downloading and updating all remotes and such. - let resolve = resolve_with_registry(ws, &mut registry, false)?; + let resolve = resolve_with_registry(ws, &mut registry)?; add_patches = false; // Second, resolve with precisely what we're doing. Filter out @@ -98,7 +98,6 @@ pub fn resolve_ws_with_method<'a>( None, specs, add_patches, - true, )?; let packages = get_resolved_packages(&resolved_with_overrides, registry)?; @@ -109,7 +108,6 @@ pub fn resolve_ws_with_method<'a>( fn resolve_with_registry<'cfg>( ws: &Workspace<'cfg>, registry: &mut PackageRegistry<'cfg>, - warn: bool, ) -> CargoResult { let prev = ops::load_pkg_lockfile(ws)?; let resolve = resolve_with_previous( @@ -120,7 +118,6 @@ fn resolve_with_registry<'cfg>( None, &[], true, - warn, )?; if !ws.is_ephemeral() { @@ -146,7 +143,6 @@ pub fn resolve_with_previous<'cfg>( to_avoid: Option<&HashSet>, specs: &[PackageIdSpec], register_patches: bool, - warn: bool, ) -> CargoResult { // Here we place an artificial limitation that all non-registry sources // cannot be locked at more than one revision. This means that if a Git @@ -334,7 +330,6 @@ pub fn resolve_with_previous<'cfg>( registry, &try_to_use, Some(ws.config()), - warn, false, // TODO: use "public and private dependencies" feature flag )?; resolved.register_used_patches(registry.patches()); diff --git a/tests/testsuite/features.rs b/tests/testsuite/features.rs index 1dfe27bcf48..35a4fa9d247 100644 --- a/tests/testsuite/features.rs +++ b/tests/testsuite/features.rs @@ -291,13 +291,12 @@ fn invalid9() { .file("bar/src/lib.rs", "") .build(); - p.cargo("build --features bar").with_stderr("\ -warning: Package `foo v0.0.1 ([..])` does not have feature `bar`. It has a required dependency with \ -that name, but only optional dependencies can be used as features. [..] 
- Compiling bar v0.0.1 ([..]) - Compiling foo v0.0.1 ([..]) - Finished dev [unoptimized + debuginfo] target(s) in [..]s -").run(); + p.cargo("build --features bar") +.with_stderr( + "\ +error: Package `foo v0.0.1 ([..])` does not have feature `bar`. It has a required dependency with that name, but only optional dependencies can be used as features. +", + ).with_status(101).run(); } #[test] @@ -335,13 +334,17 @@ fn invalid10() { .build(); p.cargo("build").with_stderr("\ -warning: Package `bar v0.0.1 ([..])` does not have feature `baz`. It has a required dependency with \ -that name, but only optional dependencies can be used as features. [..] - Compiling baz v0.0.1 ([..]) - Compiling bar v0.0.1 ([..]) - Compiling foo v0.0.1 ([..]) - Finished dev [unoptimized + debuginfo] target(s) in [..]s -").run(); +error: failed to select a version for `bar`. + ... required by package `foo v0.0.1 ([..])` +versions that meet the requirements `*` are: 0.0.1 + +the package `foo` depends on `bar`, with features: `baz` but `bar` does not have these features. + It has a required dependency with that name, but only optional dependencies can be used as features. + + +failed to select a version for `bar` which could resolve this conflict +").with_status(101) + .run(); } #[test] diff --git a/tests/testsuite/support/resolver.rs b/tests/testsuite/support/resolver.rs index d2e7ec8f184..84bbf3d8317 100644 --- a/tests/testsuite/support/resolver.rs +++ b/tests/testsuite/support/resolver.rs @@ -115,7 +115,6 @@ pub fn resolve_with_config_raw( &HashSet::new(), config, true, - true, ); // The largest test in our suite takes less then 30 sec.
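Patches 5 and 7 replace the `RcList`-based construction log with a single inversion of the `parents` map in `Context::graph()`. The sketch below illustrates that inversion outside of Cargo, using plain standard-library maps instead of Cargo's `Graph` and `PackageId` types; the `Pkg` alias, the `invert` function, and the sample package names are invented for the illustration. During resolution only the child-to-parent edges are recorded, and the final parent-to-child resolve graph is recovered by walking those edges once in the other direction.

use std::collections::{HashMap, HashSet};

/// Package names stand in for Cargo's `PackageId` in this toy model.
type Pkg = &'static str;

/// The resolver records, for every activated package, the set of packages that
/// depend on it (child -> parents). The final resolve graph wants the opposite
/// orientation (parent -> children), so it is rebuilt by inverting the edges once.
fn invert(parents: &HashMap<Pkg, HashSet<Pkg>>, activated: &[Pkg]) -> HashMap<Pkg, HashSet<Pkg>> {
    let mut graph: HashMap<Pkg, HashSet<Pkg>> = HashMap::new();
    // Every activated package gets a node, even the roots that nothing depends on.
    for &p in activated {
        graph.entry(p).or_default();
    }
    // Each child -> parent edge becomes a parent -> child edge.
    for (&child, ps) in parents {
        for &parent in ps {
            graph.entry(parent).or_default().insert(child);
        }
    }
    graph
}

fn main() {
    let mut parents: HashMap<Pkg, HashSet<Pkg>> = HashMap::new();
    parents.entry("log").or_default().insert("serde");
    parents.entry("serde").or_default().insert("root");

    let graph = invert(&parents, &["root", "serde", "log"]);
    assert!(graph["root"].contains("serde"));
    assert!(graph["serde"].contains("log"));
    assert!(graph["log"].is_empty());
    println!("{:?}", graph);
}

Keeping only `parents` and inverting it once at the end is what allows the series to delete `resolve_graph` and, with it, the `RcList` type and `GraphNode` enum removed in patch 6.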
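The behaviour that patch 9 finally settles on is easiest to see from a manifest: a `[dependencies]` entry that is not `optional = true` cannot also be named as a feature. The test below is a sketch in the style of `tests/testsuite/features.rs`, modeled on the updated `invalid9` test above; the `project()` and `basic_manifest()` helpers are assumed to be the existing testsuite support code, and the manifest contents are illustrative rather than copied from the real test.

use crate::support::{basic_manifest, project};

#[test]
fn required_dependency_used_as_feature() {
    let p = project()
        .file(
            "Cargo.toml",
            r#"
                [package]
                name = "foo"
                version = "0.0.1"
                authors = []

                [dependencies]
                # A *required* dependency: no `optional = true`, so its name
                # cannot double as a feature.
                bar = { path = "bar" }
            "#,
        )
        .file("src/main.rs", "fn main() {}")
        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
        .file("bar/src/lib.rs", "")
        .build();

    // Before this series: a resolver warning and a successful build.
    // After patch 9: resolution fails outright with the message asserted below.
    p.cargo("build --features bar")
        .with_stderr(
            "\
error: Package `foo v0.0.1 ([..])` does not have feature `bar`. It has a required dependency with that name, but only optional dependencies can be used as features.
",
        )
        .with_status(101)
        .run();
}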