diff --git a/Cargo.lock b/Cargo.lock index 010c4bce5b5..4dd9d66df1a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8,7 +8,7 @@ dependencies = [ "git2 0.0.1 (git+https://github.com/alexcrichton/git2-rs#c01b0b279470552c48cf7f902ae5baf132df0a6d)", "glob 0.0.1 (git+https://github.com/rust-lang/glob#469a6bc1a0fc289ab220170e691cffbc01dcf1e6)", "hamcrest 0.1.0 (git+https://github.com/carllerche/hamcrest-rust.git#7d46e76514ac606530dfb0e93270fffcf64ca76d)", - "semver 0.1.0 (git+https://github.com/rust-lang/semver#5dee918159d4e3d84c1ee54cf6a5305e6f7bd557)", + "semver 0.1.0 (git+https://github.com/rust-lang/semver#9bb8265ea6cf01eddfa7dc5ec9334883443e9fc7)", "tar 0.0.1 (git+https://github.com/alexcrichton/tar-rs#943d7c0173c7fa5e8c58978add0180569c68d7f7)", "toml 0.1.0 (git+https://github.com/alexcrichton/toml-rs#8a3ba4c65cfa22a3d924293a1fb3a70bfac5e66c)", "url 0.1.0 (git+https://github.com/servo/rust-url#7f1991d847fb8cf8648def2ff121bae90b89131f)", @@ -91,7 +91,7 @@ source = "git+https://github.com/alexcrichton/openssl-static-sys#d218fa63aabefb3 [[package]] name = "semver" version = "0.1.0" -source = "git+https://github.com/rust-lang/semver#5dee918159d4e3d84c1ee54cf6a5305e6f7bd557" +source = "git+https://github.com/rust-lang/semver#9bb8265ea6cf01eddfa7dc5ec9334883443e9fc7" [[package]] name = "tar" diff --git a/Cargo.toml b/Cargo.toml index b6e0d6e9d32..a397a6a6696 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -46,3 +46,6 @@ doc = false [[test]] name = "tests" + +[[test]] +name = "resolve" diff --git a/Makefile.in b/Makefile.in index 73aedcaf2c0..4b09b9ec7c4 100644 --- a/Makefile.in +++ b/Makefile.in @@ -89,7 +89,7 @@ style: sh tests/check-style.sh no-exes: - find $$(git ls-files | grep -v ttf) -perm +111 -type f \ + find $$(git ls-files) -perm +111 -type f \ -not -name configure -not -name '*.sh' -not -name '*.rs' | \ grep '.*' \ && exit 1 || exit 0 diff --git a/src/cargo/core/package_id.rs b/src/cargo/core/package_id.rs index 855272e96ea..1442a781563 100644 --- a/src/cargo/core/package_id.rs +++ b/src/cargo/core/package_id.rs @@ -1,5 +1,6 @@ use semver; use std::hash::Hash; +use std::sync::Arc; use std::fmt::{mod, Show, Formatter}; use std::hash; use serialize::{Encodable, Encoder, Decodable, Decoder}; @@ -10,8 +11,13 @@ use util::{CargoResult, CargoError, short_hash, ToSemver}; use core::source::SourceId; /// Identifier for a specific version of a package in a specific source. 
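The change that follows moves `PackageId`'s fields behind an `Arc`-backed inner struct, so cloning an id only bumps a reference count while equality, ordering, and hashing still compare the underlying data; `SourceId` gets the same treatment later in the patch. A minimal sketch of that pattern, in present-day Rust rather than the 2014 dialect used here, with hypothetical names (`Id`, `IdInner`):

    use std::sync::Arc;

    // Public handle: cloning is a cheap reference-count bump.
    #[derive(Clone, Debug)]
    pub struct Id {
        inner: Arc<IdInner>,
    }

    // Immutable payload shared between all clones of the handle.
    #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
    struct IdInner {
        name: String,
        version: String,
    }

    impl Id {
        pub fn new(name: &str, version: &str) -> Id {
            Id {
                inner: Arc::new(IdInner {
                    name: name.to_string(),
                    version: version.to_string(),
                }),
            }
        }
        pub fn name(&self) -> &str {
            &self.inner.name
        }
    }

    // Forward comparisons to the shared payload so two handles compare by
    // contents rather than by pointer identity.
    impl PartialEq for Id {
        fn eq(&self, other: &Id) -> bool {
            self.inner == other.inner
        }
    }
    impl Eq for Id {}

    fn main() {
        let a = Id::new("foo", "1.0.0");
        let b = a.clone(); // shares the same allocation
        assert_eq!(a, b);
        assert_eq!(b.name(), "foo");
    }

Cheap clones matter here because the new resolver introduced by this patch copies ids and whole resolver snapshots while backtracking.
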
-#[deriving(Clone, PartialEq, PartialOrd, Ord)] +#[deriving(Clone)] pub struct PackageId { + inner: Arc, +} + +#[deriving(PartialEq, PartialOrd, Eq, Ord)] +struct PackageIdInner { name: String, version: semver::Version, source_id: SourceId, @@ -19,8 +25,9 @@ pub struct PackageId { impl> Encodable for PackageId { fn encode(&self, s: &mut S) -> Result<(), E> { - let source = self.source_id.to_url(); - let encoded = format!("{} {} ({})", self.name, self.version, source); + let source = self.inner.source_id.to_url(); + let encoded = format!("{} {} ({})", self.inner.name, self.inner.version, + source); encoded.encode(s) } } @@ -36,22 +43,39 @@ impl> Decodable for PackageId { let source_id = SourceId::from_url(captures.at(3).to_string()); Ok(PackageId { - name: name.to_string(), - version: version, - source_id: source_id + inner: Arc::new(PackageIdInner { + name: name.to_string(), + version: version, + source_id: source_id, + }), }) } } impl Hash for PackageId { fn hash(&self, state: &mut S) { - self.name.hash(state); - self.version.to_string().hash(state); - self.source_id.hash(state); + self.inner.name.hash(state); + self.inner.version.to_string().hash(state); + self.inner.source_id.hash(state); } } +impl PartialEq for PackageId { + fn eq(&self, other: &PackageId) -> bool { + self.inner.eq(&*other.inner) + } +} +impl PartialOrd for PackageId { + fn partial_cmp(&self, other: &PackageId) -> Option { + self.inner.partial_cmp(&*other.inner) + } +} impl Eq for PackageId {} +impl Ord for PackageId { + fn cmp(&self, other: &PackageId) -> Ordering { + self.inner.cmp(&*other.inner) + } +} #[deriving(Clone, Show, PartialEq)] pub enum PackageIdError { @@ -80,27 +104,30 @@ impl PackageId { sid: &SourceId) -> CargoResult { let v = try!(version.to_semver().map_err(InvalidVersion)); Ok(PackageId { - name: name.to_string(), - version: v, - source_id: sid.clone() + inner: Arc::new(PackageIdInner { + name: name.to_string(), + version: v, + source_id: sid.clone(), + }), }) } pub fn get_name(&self) -> &str { - self.name.as_slice() + self.inner.name.as_slice() } pub fn get_version(&self) -> &semver::Version { - &self.version + &self.inner.version } pub fn get_source_id(&self) -> &SourceId { - &self.source_id + &self.inner.source_id } pub fn generate_metadata(&self) -> Metadata { let metadata = short_hash( - &(self.name.as_slice(), self.version.to_string(), &self.source_id)); + &(self.inner.name.as_slice(), self.inner.version.to_string(), + &self.inner.source_id)); let extra_filename = format!("-{}", metadata); Metadata { metadata: metadata, extra_filename: extra_filename } @@ -119,10 +146,10 @@ static CENTRAL_REPO: &'static str = "http://rust-lang.org/central-repo"; impl Show for PackageId { fn fmt(&self, f: &mut Formatter) -> fmt::Result { - try!(write!(f, "{} v{}", self.name, self.version)); + try!(write!(f, "{} v{}", self.inner.name, self.inner.version)); - if self.source_id.to_string().as_slice() != CENTRAL_REPO { - try!(write!(f, " ({})", self.source_id)); + if self.inner.source_id.to_string().as_slice() != CENTRAL_REPO { + try!(write!(f, " ({})", self.inner.source_id)); } Ok(()) diff --git a/src/cargo/core/package_id_spec.rs b/src/cargo/core/package_id_spec.rs index bfce8cd1e1d..7ce03616d90 100644 --- a/src/cargo/core/package_id_spec.rs +++ b/src/cargo/core/package_id_spec.rs @@ -49,7 +49,7 @@ impl PackageIdSpec { PackageIdSpec { name: package_id.get_name().to_string(), version: Some(package_id.get_version().clone()), - url: Some(package_id.get_source_id().url.clone()), + url: 
Some(package_id.get_source_id().get_url().clone()), } } @@ -110,7 +110,7 @@ impl PackageIdSpec { } match self.url { - Some(ref u) => *u == package_id.get_source_id().url, + Some(ref u) => u == package_id.get_source_id().get_url(), None => true } } diff --git a/src/cargo/core/resolver.rs b/src/cargo/core/resolver.rs deleted file mode 100644 index 2e846e3151b..00000000000 --- a/src/cargo/core/resolver.rs +++ /dev/null @@ -1,741 +0,0 @@ -use std::collections::hashmap::{HashMap, HashSet, Occupied, Vacant}; -use std::fmt; - -use regex::Regex; -use semver; -use serialize::{Encodable, Encoder, Decodable, Decoder}; - -use core::{PackageId, Registry, SourceId, Summary, Dependency}; -use core::PackageIdSpec; -use util::{CargoResult, Graph, human, internal, ChainError}; -use util::profile; -use util::graph::{Nodes, Edges}; - -/// Result of the `resolve` function. -#[deriving(PartialEq, Eq)] -pub struct Resolve { - graph: Graph, - features: HashMap>, - root: PackageId -} - -pub enum ResolveMethod<'a> { - ResolveEverything, - ResolveRequired(/* dev_deps = */ bool, - /* features = */ &'a [String], - /* uses_default_features = */ bool), -} - -#[deriving(Encodable, Decodable, Show)] -pub struct EncodableResolve { - package: Option>, - root: EncodableDependency -} - -impl EncodableResolve { - pub fn to_resolve(&self, default: &SourceId) -> CargoResult { - let mut g = Graph::new(); - - try!(add_pkg_to_graph(&mut g, &self.root, default)); - - match self.package { - Some(ref packages) => { - for dep in packages.iter() { - try!(add_pkg_to_graph(&mut g, dep, default)); - } - } - None => {} - } - - let root = self.root.to_package_id(default); - Ok(Resolve { graph: g, root: try!(root), features: HashMap::new() }) - } -} - -fn add_pkg_to_graph(g: &mut Graph, - dep: &EncodableDependency, - default: &SourceId) - -> CargoResult<()> -{ - let package_id = try!(dep.to_package_id(default)); - g.add(package_id.clone(), []); - - match dep.dependencies { - Some(ref deps) => { - for edge in deps.iter() { - g.link(package_id.clone(), try!(edge.to_package_id(default))); - } - }, - _ => () - }; - - Ok(()) -} - -#[deriving(Encodable, Decodable, Show, PartialOrd, Ord, PartialEq, Eq)] -pub struct EncodableDependency { - name: String, - version: String, - source: Option, - dependencies: Option> -} - -impl EncodableDependency { - fn to_package_id(&self, default_source: &SourceId) -> CargoResult { - PackageId::new( - self.name.as_slice(), - self.version.as_slice(), - self.source.as_ref().unwrap_or(default_source)) - } -} - -#[deriving(Show, PartialOrd, Ord, PartialEq, Eq)] -pub struct EncodablePackageId { - name: String, - version: String, - source: Option -} - -impl> Encodable for EncodablePackageId { - fn encode(&self, s: &mut S) -> Result<(), E> { - let mut out = format!("{} {}", self.name, self.version); - if let Some(ref s) = self.source { - out.push_str(format!(" ({})", s.to_url()).as_slice()); - } - out.encode(s) - } -} - -impl> Decodable for EncodablePackageId { - fn decode(d: &mut D) -> Result { - let string: String = raw_try!(Decodable::decode(d)); - let regex = Regex::new(r"^([^ ]+) ([^ ]+)(?: \(([^\)]+)\))?$").unwrap(); - let captures = regex.captures(string.as_slice()) - .expect("invalid serialized PackageId"); - - let name = captures.at(1); - let version = captures.at(2); - - let source = captures.at(3); - - let source_id = if source == "" { - None - } else { - Some(SourceId::from_url(source.to_string())) - }; - - Ok(EncodablePackageId { - name: name.to_string(), - version: version.to_string(), - source: source_id - }) 
- } -} - -impl EncodablePackageId { - fn to_package_id(&self, default_source: &SourceId) -> CargoResult { - PackageId::new( - self.name.as_slice(), - self.version.as_slice(), - self.source.as_ref().unwrap_or(default_source)) - } -} - -impl> Encodable for Resolve { - fn encode(&self, s: &mut S) -> Result<(), E> { - let mut ids: Vec<&PackageId> = self.graph.iter().collect(); - ids.sort(); - - let encodable = ids.iter().filter_map(|&id| { - if self.root == *id { return None; } - - Some(encodable_resolve_node(id, &self.root, &self.graph)) - }).collect::>(); - - EncodableResolve { - package: Some(encodable), - root: encodable_resolve_node(&self.root, &self.root, &self.graph) - }.encode(s) - } -} - -fn encodable_resolve_node(id: &PackageId, root: &PackageId, - graph: &Graph) -> EncodableDependency { - let deps = graph.edges(id).map(|edge| { - let mut deps = edge.map(|e| { - encodable_package_id(e, root) - }).collect::>(); - deps.sort(); - deps - }); - - let source = if id.get_source_id() == root.get_source_id() { - None - } else { - Some(id.get_source_id().clone()) - }; - - EncodableDependency { - name: id.get_name().to_string(), - version: id.get_version().to_string(), - source: source, - dependencies: deps, - } -} - -fn encodable_package_id(id: &PackageId, root: &PackageId) -> EncodablePackageId { - let source = if id.get_source_id() == root.get_source_id() { - None - } else { - Some(id.get_source_id().clone()) - }; - EncodablePackageId { - name: id.get_name().to_string(), - version: id.get_version().to_string(), - source: source, - } -} - -impl Resolve { - fn new(root: PackageId) -> Resolve { - let mut g = Graph::new(); - g.add(root.clone(), []); - Resolve { graph: g, root: root, features: HashMap::new() } - } - - pub fn iter(&self) -> Nodes { - self.graph.iter() - } - - pub fn root(&self) -> &PackageId { &self.root } - - pub fn deps(&self, pkg: &PackageId) -> Option> { - self.graph.edges(pkg) - } - - pub fn query(&self, spec: &str) -> CargoResult<&PackageId> { - let spec = try!(PackageIdSpec::parse(spec).chain_error(|| { - human(format!("invalid package id specification: `{}`", spec)) - })); - let mut ids = self.iter().filter(|p| spec.matches(*p)); - let ret = match ids.next() { - Some(id) => id, - None => return Err(human(format!("package id specification `{}` \ - matched no packages", spec))), - }; - return match ids.next() { - Some(other) => { - let mut msg = format!("There are multiple `{}` packages in \ - your project, and the specification \ - `{}` is ambiguous.\n\ - Please re-run this command \ - with `-p ` where `` is one \ - of the following:", - spec.get_name(), spec); - let mut vec = vec![ret, other]; - vec.extend(ids); - minimize(&mut msg, vec, &spec); - Err(human(msg)) - } - None => Ok(ret) - }; - - fn minimize(msg: &mut String, - ids: Vec<&PackageId>, - spec: &PackageIdSpec) { - let mut version_cnt = HashMap::new(); - for id in ids.iter() { - let slot = match version_cnt.entry(id.get_version()) { - Occupied(e) => e.into_mut(), - Vacant(e) => e.set(0u), - }; - *slot += 1; - } - for id in ids.iter() { - if version_cnt[id.get_version()] == 1 { - msg.push_str(format!("\n {}:{}", spec.get_name(), - id.get_version()).as_slice()); - } else { - msg.push_str(format!("\n {}", - PackageIdSpec::from_package_id(*id)) - .as_slice()); - } - } - } - } - - pub fn features(&self, pkg: &PackageId) -> Option<&HashSet> { - self.features.find(pkg) - } -} - -impl fmt::Show for Resolve { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - self.graph.fmt(fmt) - } -} - -struct Context<'a, R:'a> 
{ - registry: &'a mut R, - resolve: Resolve, - // cycle detection - visited: HashSet, - - // Try not to re-resolve too much - resolved: HashMap>, - - // Eventually, we will have smarter logic for checking for conflicts in the - // resolve, but without the registry, conflicts should not exist in - // practice, so this is just a sanity check. - seen: HashMap<(String, SourceId), semver::Version>, -} - -impl<'a, R: Registry> Context<'a, R> { - fn new(registry: &'a mut R, root: PackageId) -> Context<'a, R> { - Context { - registry: registry, - resolve: Resolve::new(root), - seen: HashMap::new(), - visited: HashSet::new(), - resolved: HashMap::new(), - } - } -} - -/// Builds the list of all packages required to build the first argument. -pub fn resolve(summary: &Summary, method: ResolveMethod, - registry: &mut R) -> CargoResult { - log!(5, "resolve; summary={}", summary); - let _p = profile::start(format!("resolving: {}", summary)); - - let mut context = Context::new(registry, summary.get_package_id().clone()); - context.seen.insert((summary.get_name().to_string(), - summary.get_source_id().clone()), - summary.get_version().clone()); - try!(resolve_deps(summary, method, &mut context)); - log!(5, " result={}", context.resolve); - Ok(context.resolve) -} - -fn resolve_deps<'a, R: Registry>(parent: &Summary, - method: ResolveMethod, - ctx: &mut Context<'a, R>) - -> CargoResult<()> { - let (deps, features) = try!(resolve_features(parent, method, ctx)); - - // Recursively resolve all dependencies - for &dep in deps.iter() { - if !match ctx.resolved.entry(parent.get_package_id().clone()) { - Occupied(entry) => entry.into_mut(), - Vacant(entry) => entry.set(HashSet::new()), - }.insert(dep.get_name().to_string()) { - continue - } - - let pkgs = try!(ctx.registry.query(dep)); - if pkgs.is_empty() { - return Err(human(format!("No package named `{}` found \ - (required by `{}`).\n\ - Location searched: {}\n\ - Version required: {}", - dep.get_name(), - parent.get_package_id().get_name(), - dep.get_source_id(), - dep.get_version_req()))); - } else if pkgs.len() > 1 { - return Err(internal(format!("At the moment, Cargo only supports a \ - single source for a particular package name ({}).", dep))); - } - - let summary = &pkgs[0]; - let name = summary.get_name().to_string(); - let source_id = summary.get_source_id().clone(); - let version = summary.get_version(); - - ctx.resolve.graph.link(parent.get_package_id().clone(), - summary.get_package_id().clone()); - - let found = match ctx.seen.find(&(name.clone(), source_id.clone())) { - Some(v) if v == version => true, - Some(..) => { - return Err(human(format!("Cargo found multiple copies of {} in \ - {}. This is not currently supported", - summary.get_name(), - summary.get_source_id()))); - } - None => false, - }; - if !found { - ctx.seen.insert((name, source_id), version.clone()); - ctx.resolve.graph.add(summary.get_package_id().clone(), []); - } - - // Dependency graphs are required to be a DAG. Non-transitive - // dependencies (dev-deps), however, can never introduce a cycle, so we - // skip them. - if dep.is_transitive() && - !ctx.visited.insert(summary.get_package_id().clone()) { - return Err(human(format!("Cyclic package dependency: package `{}` \ - depends on itself", - summary.get_package_id()))) - } - - // The list of enabled features for this dependency are both those - // listed in the dependency itself as well as any of our own features - // which enabled a feature of this package. 
- let features = features.find_equiv(&dep.get_name()) - .map(|v| v.as_slice()) - .unwrap_or(&[]); - try!(resolve_deps(summary, - ResolveRequired(false, features, - dep.uses_default_features()), - ctx)); - if dep.is_transitive() { - ctx.visited.remove(summary.get_package_id()); - } - } - - Ok(()) -} - -fn resolve_features<'a, R>(parent: &'a Summary, method: ResolveMethod, - ctx: &mut Context) - -> CargoResult<(Vec<&'a Dependency>, - HashMap<&'a str, Vec>)> { - let dev_deps = match method { - ResolveEverything => true, - ResolveRequired(dev_deps, _, _) => dev_deps, - }; - - // First, filter by dev-dependencies - let deps = parent.get_dependencies(); - let deps = deps.iter().filter(|d| d.is_transitive() || dev_deps); - - // Second, weed out optional dependencies, but not those required - let (mut feature_deps, used_features) = try!(build_features(parent, method)); - let mut ret = HashMap::new(); - let deps = deps.filter(|d| { - !d.is_optional() || feature_deps.contains_key_equiv(&d.get_name()) - }).collect::>(); - - // Next, sanitize all requested features by whitelisting all the requested - // features that correspond to optional dependencies - for dep in deps.iter() { - let mut base = feature_deps.pop_equiv(&dep.get_name()) - .unwrap_or(Vec::new()); - for feature in dep.get_features().iter() { - base.push(feature.clone()); - if feature.as_slice().contains("/") { - return Err(human(format!("features in dependencies \ - cannot enable features in \ - other dependencies: `{}`", - feature))); - } - } - ret.insert(dep.get_name(), base); - } - - // All features can only point to optional dependencies, in which case they - // should have all been weeded out by the above iteration. Any remaining - // features are bugs in that the package does not actually have those - // features. - if feature_deps.len() > 0 { - let unknown = feature_deps.keys().map(|s| s.as_slice()) - .collect::>(); - if unknown.len() > 0 { - let features = unknown.connect(", "); - return Err(human(format!("Package `{}` does not have these features: \ - `{}`", parent.get_package_id(), features))) - } - } - - // Record what list of features is active for this package. - { - let pkgid = parent.get_package_id().clone(); - match ctx.resolve.features.entry(pkgid) { - Occupied(entry) => entry.into_mut(), - Vacant(entry) => entry.set(HashSet::new()), - }.extend(used_features.into_iter()); - } - - Ok((deps, ret)) -} - -// Returns a pair of (feature dependencies, all used features) -// -// The feature dependencies map is a mapping of package name to list of features -// enabled. Each package should be enabled, and each package should have the -// specified set of features enabled. -// -// The all used features set is the set of features which this local package had -// enabled, which is later used when compiling to instruct the code what -// features were enabled. 
-fn build_features(s: &Summary, method: ResolveMethod) - -> CargoResult<(HashMap>, HashSet)> { - let mut deps = HashMap::new(); - let mut used = HashSet::new(); - let mut visited = HashSet::new(); - match method { - ResolveEverything => { - for key in s.get_features().keys() { - try!(add_feature(s, key.as_slice(), &mut deps, &mut used, - &mut visited)); - } - } - ResolveRequired(_, requested_features, _) => { - for feat in requested_features.iter() { - try!(add_feature(s, feat.as_slice(), &mut deps, &mut used, - &mut visited)); - } - } - } - match method { - ResolveEverything | ResolveRequired(_, _, true) => { - if s.get_features().find_equiv(&"default").is_some() && - !visited.contains_equiv(&"default") { - try!(add_feature(s, "default", &mut deps, &mut used, - &mut visited)); - } - } - _ => {} - } - return Ok((deps, used)); - - fn add_feature(s: &Summary, feat: &str, - deps: &mut HashMap>, - used: &mut HashSet, - visited: &mut HashSet) -> CargoResult<()> { - if feat.is_empty() { return Ok(()) } - - // If this feature is of the form `foo/bar`, then we just lookup package - // `foo` and enable its feature `bar`. Otherwise this feature is of the - // form `foo` and we need to recurse to enable the feature `foo` for our - // own package, which may end up enabling more features or just enabling - // a dependency. - let mut parts = feat.splitn(1, '/'); - let feat_or_package = parts.next().unwrap(); - match parts.next() { - Some(feat) => { - let package = feat_or_package; - match deps.entry(package.to_string()) { - Occupied(e) => e.into_mut(), - Vacant(e) => e.set(Vec::new()), - }.push(feat.to_string()); - } - None => { - let feat = feat_or_package; - if !visited.insert(feat.to_string()) { - return Err(human(format!("Cyclic feature dependency: \ - feature `{}` depends on itself", - feat))) - } - used.insert(feat.to_string()); - match s.get_features().find_equiv(&feat) { - Some(recursive) => { - for f in recursive.iter() { - try!(add_feature(s, f.as_slice(), deps, used, - visited)); - } - } - None => { - match deps.entry(feat.to_string()) { - Occupied(..) => {} // already activated - Vacant(e) => { e.set(Vec::new()); } - } - } - } - visited.remove(&feat.to_string()); - } - } - Ok(()) - } -} - -#[cfg(test)] -mod test { - use std::collections::HashMap; - - use hamcrest::{assert_that, equal_to, contains}; - - use core::source::{SourceId, RegistryKind, GitKind}; - use core::{Dependency, PackageId, Summary, Registry}; - use util::{CargoResult, ToUrl}; - - fn resolve(pkg: PackageId, deps: Vec, - registry: &mut R) - -> CargoResult> { - let summary = Summary::new(pkg, deps, HashMap::new()).unwrap(); - let method = super::ResolveEverything; - Ok(try!(super::resolve(&summary, method, - registry)).iter().map(|p| p.clone()).collect()) - } - - trait ToDep { - fn to_dep(self) -> Dependency; - } - - impl ToDep for &'static str { - fn to_dep(self) -> Dependency { - let url = "http://example.com".to_url().unwrap(); - let source_id = SourceId::new(RegistryKind, url); - Dependency::parse(self, Some("1.0.0"), &source_id).unwrap() - } - } - - impl ToDep for Dependency { - fn to_dep(self) -> Dependency { - self - } - } - - macro_rules! 
pkg( - ($name:expr => $($deps:expr),+) => ({ - let d: Vec = vec!($($deps.to_dep()),+); - - Summary::new(PackageId::new($name, "1.0.0", ®istry_loc()).unwrap(), - d, HashMap::new()).unwrap() - }); - - ($name:expr) => ( - Summary::new(PackageId::new($name, "1.0.0", ®istry_loc()).unwrap(), - Vec::new(), HashMap::new()).unwrap() - ) - ) - - fn registry_loc() -> SourceId { - let remote = "http://example.com".to_url().unwrap(); - SourceId::new(RegistryKind, remote) - } - - fn pkg(name: &str) -> Summary { - Summary::new(pkg_id(name), Vec::new(), HashMap::new()).unwrap() - } - - fn pkg_id(name: &str) -> PackageId { - PackageId::new(name, "1.0.0", ®istry_loc()).unwrap() - } - - fn pkg_id_loc(name: &str, loc: &str) -> PackageId { - let remote = loc.to_url(); - let source_id = SourceId::new(GitKind("master".to_string()), - remote.unwrap()); - - PackageId::new(name, "1.0.0", &source_id).unwrap() - } - - fn pkg_loc(name: &str, loc: &str) -> Summary { - Summary::new(pkg_id_loc(name, loc), Vec::new(), HashMap::new()).unwrap() - } - - fn dep(name: &str) -> Dependency { - let url = "http://example.com".to_url().unwrap(); - let source_id = SourceId::new(RegistryKind, url); - Dependency::parse(name, Some("1.0.0"), &source_id).unwrap() - } - - fn dep_loc(name: &str, location: &str) -> Dependency { - let url = location.to_url().unwrap(); - let source_id = SourceId::new(GitKind("master".to_string()), url); - Dependency::parse(name, Some("1.0.0"), &source_id).unwrap() - } - - fn registry(pkgs: Vec) -> Vec { - pkgs - } - - fn names(names: &[&'static str]) -> Vec { - names.iter() - .map(|name| PackageId::new(*name, "1.0.0", ®istry_loc()).unwrap()) - .collect() - } - - fn loc_names(names: &[(&'static str, &'static str)]) -> Vec { - names.iter() - .map(|&(name, loc)| pkg_id_loc(name, loc)).collect() - } - - #[test] - pub fn test_resolving_empty_dependency_list() { - let res = resolve(pkg_id("root"), Vec::new(), - &mut registry(vec!())).unwrap(); - - assert_that(&res, equal_to(&names(["root"]))); - } - - #[test] - pub fn test_resolving_only_package() { - let mut reg = registry(vec!(pkg("foo"))); - let res = resolve(pkg_id("root"), vec![dep("foo")], &mut reg); - - assert_that(&res.unwrap(), contains(names(["root", "foo"])).exactly()); - } - - #[test] - pub fn test_resolving_one_dep() { - let mut reg = registry(vec!(pkg("foo"), pkg("bar"))); - let res = resolve(pkg_id("root"), vec![dep("foo")], &mut reg); - - assert_that(&res.unwrap(), contains(names(["root", "foo"])).exactly()); - } - - #[test] - pub fn test_resolving_multiple_deps() { - let mut reg = registry(vec!(pkg!("foo"), pkg!("bar"), pkg!("baz"))); - let res = resolve(pkg_id("root"), vec![dep("foo"), dep("baz")], - &mut reg).unwrap(); - - assert_that(&res, contains(names(["root", "foo", "baz"])).exactly()); - } - - #[test] - pub fn test_resolving_transitive_deps() { - let mut reg = registry(vec!(pkg!("foo"), pkg!("bar" => "foo"))); - let res = resolve(pkg_id("root"), vec![dep("bar")], &mut reg).unwrap(); - - assert_that(&res, contains(names(["root", "foo", "bar"]))); - } - - #[test] - pub fn test_resolving_common_transitive_deps() { - let mut reg = registry(vec!(pkg!("foo" => "bar"), pkg!("bar"))); - let res = resolve(pkg_id("root"), vec![dep("foo"), dep("bar")], - &mut reg).unwrap(); - - assert_that(&res, contains(names(["root", "foo", "bar"]))); - } - - #[test] - pub fn test_resolving_with_same_name() { - let list = vec![pkg_loc("foo", "http://first.example.com"), - pkg_loc("bar", "http://second.example.com")]; - - let mut reg = registry(list); - let res = 
resolve(pkg_id("root"), - vec![dep_loc("foo", "http://first.example.com"), - dep_loc("bar", "http://second.example.com")], - &mut reg); - - let mut names = loc_names([("foo", "http://first.example.com"), - ("bar", "http://second.example.com")]); - - names.push(pkg_id("root")); - - assert_that(&res.unwrap(), contains(names).exactly()); - } - - #[test] - pub fn test_resolving_with_dev_deps() { - let mut reg = registry(vec!( - pkg!("foo" => "bar", dep("baz").transitive(false)), - pkg!("baz" => "bat", dep("bam").transitive(false)), - pkg!("bar"), - pkg!("bat") - )); - - let res = resolve(pkg_id("root"), - vec![dep("foo"), dep("baz").transitive(false)], - &mut reg).unwrap(); - - assert_that(&res, contains(names(["root", "foo", "bar", "baz"]))); - } -} diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs new file mode 100644 index 00000000000..2eab82fe3c1 --- /dev/null +++ b/src/cargo/core/resolver/encode.rs @@ -0,0 +1,179 @@ +use std::collections::HashMap; + +use regex::Regex; +use serialize::{Encodable, Encoder, Decodable, Decoder}; + +use core::{PackageId, SourceId}; +use util::{CargoResult, Graph}; + +use super::Resolve; + +#[deriving(Encodable, Decodable, Show)] +pub struct EncodableResolve { + package: Option>, + root: EncodableDependency +} + +impl EncodableResolve { + pub fn to_resolve(&self, default: &SourceId) -> CargoResult { + let mut g = Graph::new(); + + try!(add_pkg_to_graph(&mut g, &self.root, default)); + + match self.package { + Some(ref packages) => { + for dep in packages.iter() { + try!(add_pkg_to_graph(&mut g, dep, default)); + } + } + None => {} + } + + let root = self.root.to_package_id(default); + Ok(Resolve { graph: g, root: try!(root), features: HashMap::new() }) + } +} + +fn add_pkg_to_graph(g: &mut Graph, + dep: &EncodableDependency, + default: &SourceId) + -> CargoResult<()> +{ + let package_id = try!(dep.to_package_id(default)); + g.add(package_id.clone(), []); + + match dep.dependencies { + Some(ref deps) => { + for edge in deps.iter() { + g.link(package_id.clone(), try!(edge.to_package_id(default))); + } + }, + _ => () + }; + + Ok(()) +} + +#[deriving(Encodable, Decodable, Show, PartialOrd, Ord, PartialEq, Eq)] +pub struct EncodableDependency { + name: String, + version: String, + source: Option, + dependencies: Option> +} + +impl EncodableDependency { + fn to_package_id(&self, default_source: &SourceId) -> CargoResult { + PackageId::new( + self.name.as_slice(), + self.version.as_slice(), + self.source.as_ref().unwrap_or(default_source)) + } +} + +#[deriving(Show, PartialOrd, Ord, PartialEq, Eq)] +pub struct EncodablePackageId { + name: String, + version: String, + source: Option +} + +impl> Encodable for EncodablePackageId { + fn encode(&self, s: &mut S) -> Result<(), E> { + let mut out = format!("{} {}", self.name, self.version); + if let Some(ref s) = self.source { + out.push_str(format!(" ({})", s.to_url()).as_slice()); + } + out.encode(s) + } +} + +impl> Decodable for EncodablePackageId { + fn decode(d: &mut D) -> Result { + let string: String = raw_try!(Decodable::decode(d)); + let regex = Regex::new(r"^([^ ]+) ([^ ]+)(?: \(([^\)]+)\))?$").unwrap(); + let captures = regex.captures(string.as_slice()) + .expect("invalid serialized PackageId"); + + let name = captures.at(1); + let version = captures.at(2); + + let source = captures.at(3); + + let source_id = if source == "" { + None + } else { + Some(SourceId::from_url(source.to_string())) + }; + + Ok(EncodablePackageId { + name: name.to_string(), + version: version.to_string(), + 
source: source_id + }) + } +} + +impl EncodablePackageId { + fn to_package_id(&self, default_source: &SourceId) -> CargoResult { + PackageId::new( + self.name.as_slice(), + self.version.as_slice(), + self.source.as_ref().unwrap_or(default_source)) + } +} + +impl> Encodable for Resolve { + fn encode(&self, s: &mut S) -> Result<(), E> { + let mut ids: Vec<&PackageId> = self.graph.iter().collect(); + ids.sort(); + + let encodable = ids.iter().filter_map(|&id| { + if self.root == *id { return None; } + + Some(encodable_resolve_node(id, &self.root, &self.graph)) + }).collect::>(); + + EncodableResolve { + package: Some(encodable), + root: encodable_resolve_node(&self.root, &self.root, &self.graph) + }.encode(s) + } +} + +fn encodable_resolve_node(id: &PackageId, root: &PackageId, + graph: &Graph) -> EncodableDependency { + let deps = graph.edges(id).map(|edge| { + let mut deps = edge.map(|e| { + encodable_package_id(e, root) + }).collect::>(); + deps.sort(); + deps + }); + + let source = if id.get_source_id() == root.get_source_id() { + None + } else { + Some(id.get_source_id().clone()) + }; + + EncodableDependency { + name: id.get_name().to_string(), + version: id.get_version().to_string(), + source: source, + dependencies: deps, + } +} + +fn encodable_package_id(id: &PackageId, root: &PackageId) -> EncodablePackageId { + let source = if id.get_source_id() == root.get_source_id() { + None + } else { + Some(id.get_source_id().clone()) + }; + EncodablePackageId { + name: id.get_name().to_string(), + version: id.get_version().to_string(), + source: source, + } +} diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs new file mode 100644 index 00000000000..8909d1d82bd --- /dev/null +++ b/src/cargo/core/resolver/mod.rs @@ -0,0 +1,483 @@ +use std::cell::RefCell; +use std::collections::hashmap::{HashMap, HashSet, Occupied, Vacant}; +use std::fmt; +use std::rc::Rc; +use semver; + +use core::{PackageId, Registry, SourceId, Summary, Dependency}; +use core::PackageIdSpec; +use util::{CargoResult, Graph, human, ChainError}; +use util::profile; +use util::graph::{Nodes, Edges}; + +pub use self::encode::{EncodableResolve, EncodableDependency, EncodablePackageId}; + +mod encode; + +/// Represents a fully resolved package dependency graph. Each node in the graph +/// is a package and edges represent dependencies between packages. +/// +/// Each instance of `Resolve` also understands the full set of features used +/// for each package as well as what the root package is. 
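The `Resolve` type declared next stores the resolved packages in cargo's `util::Graph`, and this patch also adds a `Clone` impl for `Graph` further down so whole resolver states can be snapshotted. As a rough illustration of the shape of that structure, here is a toy adjacency-map graph with the same `add`/`link`/`edges` vocabulary; it is not the real `cargo::util::Graph` API:

    use std::collections::{HashMap, HashSet};
    use std::hash::Hash;

    // Toy stand-in for cargo's util::Graph: each node maps to its outgoing edges.
    #[derive(Clone, Debug)]
    struct Graph<N: Eq + Hash + Clone> {
        nodes: HashMap<N, HashSet<N>>,
    }

    impl<N: Eq + Hash + Clone> Graph<N> {
        fn new() -> Graph<N> {
            Graph { nodes: HashMap::new() }
        }
        fn add(&mut self, node: N) {
            self.nodes.entry(node).or_default();
        }
        fn link(&mut self, from: N, to: N) {
            self.add(to.clone());
            self.nodes.entry(from).or_default().insert(to);
        }
        fn edges(&self, node: &N) -> Option<&HashSet<N>> {
            self.nodes.get(node)
        }
    }

    fn main() {
        let mut g = Graph::new();
        g.add("root");
        g.link("root", "foo");
        g.link("foo", "bar");
        assert!(g.edges(&"root").unwrap().contains("foo"));
        // Cloning the whole graph is what makes snapshot-and-backtrack viable.
        let snapshot = g.clone();
        assert_eq!(snapshot.edges(&"bar").unwrap().len(), 0);
    }
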
+#[deriving(PartialEq, Eq, Clone)] +pub struct Resolve { + graph: Graph, + features: HashMap>, + root: PackageId +} + +pub enum ResolveMethod<'a> { + ResolveEverything, + ResolveRequired(/* dev_deps = */ bool, + /* features = */ &'a [String], + /* uses_default_features = */ bool), +} + +impl Resolve { + fn new(root: PackageId) -> Resolve { + let mut g = Graph::new(); + g.add(root.clone(), []); + Resolve { graph: g, root: root, features: HashMap::new() } + } + + pub fn iter(&self) -> Nodes { + self.graph.iter() + } + + pub fn root(&self) -> &PackageId { &self.root } + + pub fn deps(&self, pkg: &PackageId) -> Option> { + self.graph.edges(pkg) + } + + pub fn query(&self, spec: &str) -> CargoResult<&PackageId> { + let spec = try!(PackageIdSpec::parse(spec).chain_error(|| { + human(format!("invalid package id specification: `{}`", spec)) + })); + let mut ids = self.iter().filter(|p| spec.matches(*p)); + let ret = match ids.next() { + Some(id) => id, + None => return Err(human(format!("package id specification `{}` \ + matched no packages", spec))), + }; + return match ids.next() { + Some(other) => { + let mut msg = format!("There are multiple `{}` packages in \ + your project, and the specification \ + `{}` is ambiguous.\n\ + Please re-run this command \ + with `-p ` where `` is one \ + of the following:", + spec.get_name(), spec); + let mut vec = vec![ret, other]; + vec.extend(ids); + minimize(&mut msg, vec, &spec); + Err(human(msg)) + } + None => Ok(ret) + }; + + fn minimize(msg: &mut String, + ids: Vec<&PackageId>, + spec: &PackageIdSpec) { + let mut version_cnt = HashMap::new(); + for id in ids.iter() { + let slot = match version_cnt.entry(id.get_version()) { + Occupied(e) => e.into_mut(), + Vacant(e) => e.set(0u), + }; + *slot += 1; + } + for id in ids.iter() { + if version_cnt[id.get_version()] == 1 { + msg.push_str(format!("\n {}:{}", spec.get_name(), + id.get_version()).as_slice()); + } else { + msg.push_str(format!("\n {}", + PackageIdSpec::from_package_id(*id)) + .as_slice()); + } + } + } + } + + pub fn features(&self, pkg: &PackageId) -> Option<&HashSet> { + self.features.find(pkg) + } +} + +impl fmt::Show for Resolve { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + self.graph.fmt(fmt) + } +} + +#[deriving(Clone)] +struct Context { + activations: HashMap<(String, SourceId), Vec>>, + resolve: Resolve, + visited: Rc>>, +} + +/// Builds the list of all packages required to build the first argument. +pub fn resolve(summary: &Summary, method: ResolveMethod, + registry: &mut R) -> CargoResult { + log!(5, "resolve; summary={}", summary); + + let mut cx = Context { + resolve: Resolve::new(summary.get_package_id().clone()), + activations: HashMap::new(), + visited: Rc::new(RefCell::new(HashSet::new())), + }; + let _p = profile::start(format!("resolving: {}", summary)); + cx.activations.insert((summary.get_name().to_string(), + summary.get_source_id().clone()), + vec![Rc::new(summary.clone())]); + match try!(activate(cx, registry, summary, method)) { + Ok(cx) => Ok(cx.resolve), + Err(e) => Err(e), + } +} + +fn activate(mut cx: Context, + registry: &mut R, + parent: &Summary, + method: ResolveMethod) + -> CargoResult> { + // First, figure out our set of dependencies based on the requsted set of + // features. This also calculates what features we're going to enable for + // our own dependencies. + let deps = try!(resolve_features(&mut cx, parent, method)); + + // Next, transform all dependencies into a list of possible candidates which + // can satisfy that dependency. 
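The code after this comment queries the registry for each dependency, sorts each candidate list newest-first, and then sorts the dependencies themselves by how few candidates they have, so the most constrained choices are attempted (and fail) earliest. A small illustration of just that ordering step, where `Candidate` is a made-up stand-in for the `Rc<Summary>` values the resolver actually carries:

    // Illustrative stand-in type; the real resolver works over Rc<Summary>
    // values returned by Registry::query.
    #[derive(Clone, Debug, PartialEq)]
    struct Candidate {
        name: String,
        version: (u64, u64, u64),
    }

    // Sort each dependency's candidates newest-first, then order the
    // dependencies by candidate count so dead ends are discovered cheaply.
    fn order_for_search(
        mut per_dep: Vec<(String, Vec<Candidate>)>,
    ) -> Vec<(String, Vec<Candidate>)> {
        for (_, candidates) in per_dep.iter_mut() {
            candidates.sort_by(|a, b| b.version.cmp(&a.version));
        }
        per_dep.sort_by_key(|(_, candidates)| candidates.len());
        per_dep
    }

    fn main() {
        let c = |name: &str, v: (u64, u64, u64)| Candidate {
            name: name.to_string(),
            version: v,
        };
        let ordered = order_for_search(vec![
            ("log".to_string(), vec![c("log", (0, 3, 1)), c("log", (0, 4, 0))]),
            ("rare".to_string(), vec![c("rare", (1, 0, 0))]),
        ]);
        assert_eq!(ordered[0].0, "rare");               // fewest candidates first
        assert_eq!(ordered[1].1[0].version, (0, 4, 0)); // newest version first
    }
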
+ let mut deps = try!(deps.into_iter().map(|(_dep_name, (dep, features))| { + let mut candidates = try!(registry.query(dep)); + // When we attempt versions for a package, we'll want to start at the + // maximum version and work our way down. + candidates.as_mut_slice().sort_by(|a, b| { + b.get_version().cmp(a.get_version()) + }); + let candidates = candidates.into_iter().map(Rc::new).collect::>(); + Ok((dep, candidates, features)) + }).collect::>>()); + + // When we recurse, attempt to resolve dependencies with fewer candidates + // before recursing on dependencies with more candidates. This way if the + // dependency with only one candidate can't be resolved we don't have to do + // a bunch of work before we figure that out. + deps.as_mut_slice().sort_by(|&(_, ref a, _), &(_, ref b, _)| { + a.len().cmp(&b.len()) + }); + + activate_deps(cx, registry, parent, deps.as_slice(), 0) +} + +fn activate_deps(cx: Context, + registry: &mut R, + parent: &Summary, + deps: &[(&Dependency, Vec>, Vec)], + cur: uint) -> CargoResult> { + if cur == deps.len() { return Ok(Ok(cx)) } + let (dep, ref candidates, ref features) = deps[cur]; + let method = ResolveRequired(false, features.as_slice(), + dep.uses_default_features()); + + let key = (dep.get_name().to_string(), dep.get_source_id().clone()); + let prev_active = cx.activations.find(&key) + .map(|v| v.as_slice()).unwrap_or(&[]); + log!(5, "{}[{}]>{} {} candidates", parent.get_name(), cur, dep.get_name(), + candidates.len()); + log!(5, "{}[{}]>{} {} prev activations", parent.get_name(), cur, + dep.get_name(), prev_active.len()); + + // Filter the set of candidates based on the previously activated + // versions for this dependency. We can actually use a version if it + // precisely matches an activated version or if it is otherwise + // incompatible with all other activated versions. Note that we define + // "compatible" here in terms of the semver sense where if the left-most + // nonzero digit is the same they're considered compatible. + let mut my_candidates = candidates.iter().filter(|&b| { + prev_active.iter().any(|a| a == b) || + prev_active.iter().all(|a| { + !compatible(a.get_version(), b.get_version()) + }) + }); + + // Alright, for each candidate that's gotten this far, it meets the + // following requirements: + // + // 1. The version matches the dependency requirement listed for this + // package + // 2. There are no activated versions for this package which are + // semver-compatible, or there's an activated version which is + // precisely equal to `candidate`. + // + // This means that we're going to attempt to activate each candidate in + // turn. We could possibly fail to activate each candidate, so we try + // each one in turn. 
+ let mut last_err = None; + for candidate in my_candidates { + log!(5, "{}[{}]>{} trying {}", parent.get_name(), cur, dep.get_name(), + candidate.get_version()); + let mut my_cx = cx.clone(); + let early_return = { + my_cx.resolve.graph.link(parent.get_package_id().clone(), + candidate.get_package_id().clone()); + let prev = match my_cx.activations.entry(key.clone()) { + Occupied(e) => e.into_mut(), + Vacant(e) => e.set(Vec::new()), + }; + if prev.iter().any(|c| c == candidate) { + match cx.resolve.features(candidate.get_package_id()) { + Some(prev_features) => { + features.iter().all(|f| prev_features.contains(f)) + } + None => features.len() == 0, + } + } else { + my_cx.resolve.graph.add(candidate.get_package_id().clone(), []); + prev.push(candidate.clone()); + false + } + }; + + let my_cx = if early_return { + my_cx + } else { + // Dependency graphs are required to be a DAG. Non-transitive + // dependencies (dev-deps), however, can never introduce a cycle, so we + // skip them. + if dep.is_transitive() && + !cx.visited.borrow_mut().insert(candidate.get_package_id().clone()) { + return Err(human(format!("cyclic package dependency: package `{}` \ + depends on itself", + candidate.get_package_id()))) + } + let my_cx = try!(activate(my_cx, registry, &**candidate, method)); + if dep.is_transitive() { + cx.visited.borrow_mut().remove(candidate.get_package_id()); + } + match my_cx { + Ok(cx) => cx, + Err(e) => { last_err = Some(e); continue } + } + }; + match try!(activate_deps(my_cx, registry, parent, deps, cur + 1)) { + Ok(cx) => return Ok(Ok(cx)), + Err(e) => { last_err = Some(e); } + } + } + log!(5, "{}[{}]>{} -- {}", parent.get_name(), cur, dep.get_name(), last_err); + + // Oh well, we couldn't activate any of the candidates, so we just can't + // activate this dependency at all + Ok(match last_err { + Some(e) => Err(e), + None if candidates.len() > 0 => { + let mut msg = format!("failed to select a version for `{}` \ + (required by `{}`):\n\ + all possible versions conflict with \ + previously selected versions of `{}`", + dep.get_name(), parent.get_name(), + dep.get_name()); + 'outer: for v in prev_active.iter() { + for node in cx.resolve.graph.iter() { + let mut edges = match cx.resolve.graph.edges(node) { + Some(edges) => edges, + None => continue, + }; + for edge in edges { + if edge != v.get_package_id() { continue } + + msg.push_str(format!("\n version {} in use by {}", + v.get_version(), edge).as_slice()); + continue 'outer; + } + } + msg.push_str(format!("\n version {} in use by ??", + v.get_version()).as_slice()); + } + + msg.push_str(format!("\n possible versions to select: {}", + candidates.iter().map(|v| v.get_version()) + .collect::>()).as_slice()); + + Err(human(msg)) + } + None => { + Err(human(format!("no package named `{}` found (required by `{}`)\n\ + location searched: {}\n\ + version required: {}", + dep.get_name(), parent.get_name(), + dep.get_source_id(), + dep.get_version_req()))) + } + }) +} + +// Returns if `a` and `b` are compatible in the semver sense. This is a +// commutative operation. +// +// Versions `a` and `b` are compatible if their left-most nonzero digit is the +// same. 
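For concreteness, here is the caret-style rule described above restated over plain `(major, minor, patch)` triples, with a few cases it accepts and rejects. This is illustrative only; the real helper, operating on `semver::Version`, is the `compatible` function defined immediately below:

    // Two versions are "compatible" when their left-most nonzero component
    // matches (and everything to its left, which must be zero, also matches).
    fn compatible(a: (u64, u64, u64), b: (u64, u64, u64)) -> bool {
        if a.0 != b.0 { return false; }
        if a.0 != 0 { return true; }
        if a.1 != b.1 { return false; }
        if a.1 != 0 { return true; }
        a.2 == b.2
    }

    fn main() {
        assert!(compatible((1, 2, 0), (1, 9, 3)));   // same nonzero major
        assert!(!compatible((1, 0, 0), (2, 0, 0)));  // different majors
        assert!(compatible((0, 1, 2), (0, 1, 9)));   // 0.1.x share minor 1
        assert!(!compatible((0, 1, 0), (0, 2, 0)));  // 0.1 vs 0.2 conflict
        assert!(!compatible((0, 0, 3), (0, 0, 4)));  // 0.0.x must match exactly
    }

Only candidates that are incompatible with every previously activated version (or exactly equal to one of them) survive the filter above, which is what allows, say, `0.1.x` and `0.2.x` of the same package to coexist in one graph.
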
+fn compatible(a: &semver::Version, b: &semver::Version) -> bool { + if a.major != b.major { return false } + if a.major != 0 { return true } + if a.minor != b.minor { return false } + if a.minor != 0 { return true } + a.patch == b.patch +} + +fn resolve_features<'a>(cx: &mut Context, parent: &'a Summary, + method: ResolveMethod) + -> CargoResult)>> { + let dev_deps = match method { + ResolveEverything => true, + ResolveRequired(dev_deps, _, _) => dev_deps, + }; + + // First, filter by dev-dependencies + let deps = parent.get_dependencies(); + let mut deps = deps.iter().filter(|d| d.is_transitive() || dev_deps); + + let (mut feature_deps, used_features) = try!(build_features(parent, method)); + let mut ret = HashMap::new(); + + // Next, sanitize all requested features by whitelisting all the requested + // features that correspond to optional dependencies + for dep in deps { + // weed out optional dependencies, but not those required + if dep.is_optional() && !feature_deps.contains_key_equiv(&dep.get_name()) { + continue + } + let mut base = feature_deps.pop_equiv(&dep.get_name()) + .unwrap_or(Vec::new()); + for feature in dep.get_features().iter() { + base.push(feature.clone()); + if feature.as_slice().contains("/") { + return Err(human(format!("features in dependencies \ + cannot enable features in \ + other dependencies: `{}`", + feature))); + } + } + ret.insert(dep.get_name(), (dep, base)); + } + + // All features can only point to optional dependencies, in which case they + // should have all been weeded out by the above iteration. Any remaining + // features are bugs in that the package does not actually have those + // features. + if feature_deps.len() > 0 { + let unknown = feature_deps.keys().map(|s| s.as_slice()) + .collect::>(); + if unknown.len() > 0 { + let features = unknown.connect(", "); + return Err(human(format!("Package `{}` does not have these features: \ + `{}`", parent.get_package_id(), features))) + } + } + + // Record what list of features is active for this package. + if used_features.len() > 0 { + let pkgid = parent.get_package_id().clone(); + match cx.resolve.features.entry(pkgid) { + Occupied(entry) => entry.into_mut(), + Vacant(entry) => entry.set(HashSet::new()), + }.extend(used_features.into_iter()); + } + + Ok(ret) +} + +// Returns a pair of (feature dependencies, all used features) +// +// The feature dependencies map is a mapping of package name to list of features +// enabled. Each package should be enabled, and each package should have the +// specified set of features enabled. +// +// The all used features set is the set of features which this local package had +// enabled, which is later used when compiling to instruct the code what +// features were enabled. 
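The comment above describes how `build_features` routes a requested feature either to this package (the plain `feat` form, possibly recursing through the feature map) or to a dependency (the `dep/feat` form). A minimal sketch of just the string-splitting step, using a hypothetical `classify` helper written against current Rust, where `splitn(2, '/')` plays the role of the patch's 2014-era `splitn(1, '/')` (which split at most once):

    // A requested feature is either "feat" (a feature of this package) or
    // "dep/feat" (enable feature `feat` of the optional dependency `dep`).
    fn classify(feature: &str) -> (&str, Option<&str>) {
        let mut parts = feature.splitn(2, '/');
        (parts.next().unwrap(), parts.next())
    }

    fn main() {
        assert_eq!(classify("default"), ("default", None));
        assert_eq!(classify("openssl/tlsv1"), ("openssl", Some("tlsv1")));
        // Note that resolve_features above rejects "foo/bar" strings listed in
        // a dependency's own feature list, since a dependency cannot enable
        // features of a sibling dependency.
    }
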
+fn build_features(s: &Summary, method: ResolveMethod) + -> CargoResult<(HashMap>, HashSet)> { + let mut deps = HashMap::new(); + let mut used = HashSet::new(); + let mut visited = HashSet::new(); + match method { + ResolveEverything => { + for key in s.get_features().keys() { + try!(add_feature(s, key.as_slice(), &mut deps, &mut used, + &mut visited)); + } + } + ResolveRequired(_, requested_features, _) => { + for feat in requested_features.iter() { + try!(add_feature(s, feat.as_slice(), &mut deps, &mut used, + &mut visited)); + } + } + } + match method { + ResolveEverything | ResolveRequired(_, _, true) => { + if s.get_features().find_equiv(&"default").is_some() && + !visited.contains_equiv(&"default") { + try!(add_feature(s, "default", &mut deps, &mut used, + &mut visited)); + } + } + _ => {} + } + return Ok((deps, used)); + + fn add_feature(s: &Summary, feat: &str, + deps: &mut HashMap>, + used: &mut HashSet, + visited: &mut HashSet) -> CargoResult<()> { + if feat.is_empty() { return Ok(()) } + + // If this feature is of the form `foo/bar`, then we just lookup package + // `foo` and enable its feature `bar`. Otherwise this feature is of the + // form `foo` and we need to recurse to enable the feature `foo` for our + // own package, which may end up enabling more features or just enabling + // a dependency. + let mut parts = feat.splitn(1, '/'); + let feat_or_package = parts.next().unwrap(); + match parts.next() { + Some(feat) => { + let package = feat_or_package; + match deps.entry(package.to_string()) { + Occupied(e) => e.into_mut(), + Vacant(e) => e.set(Vec::new()), + }.push(feat.to_string()); + } + None => { + let feat = feat_or_package; + if !visited.insert(feat.to_string()) { + return Err(human(format!("Cyclic feature dependency: \ + feature `{}` depends on itself", + feat))) + } + used.insert(feat.to_string()); + match s.get_features().find_equiv(&feat) { + Some(recursive) => { + for f in recursive.iter() { + try!(add_feature(s, f.as_slice(), deps, used, + visited)); + } + } + None => { + match deps.entry(feat.to_string()) { + Occupied(..) => {} // already activated + Vacant(e) => { e.set(Vec::new()); } + } + } + } + visited.remove(&feat.to_string()); + } + } + Ok(()) + } +} diff --git a/src/cargo/core/source.rs b/src/cargo/core/source.rs index 6b029b7008b..3740ca3eaf2 100644 --- a/src/cargo/core/source.rs +++ b/src/cargo/core/source.rs @@ -5,6 +5,7 @@ use std::fmt::{mod, Show, Formatter}; use std::hash; use std::iter; use std::mem; +use std::sync::Arc; use serialize::{Decodable, Decoder, Encodable, Encoder}; use url::Url; @@ -59,104 +60,26 @@ type Error = Box; /// Unique identifier for a source of packages. #[deriving(Clone, Eq)] pub struct SourceId { - pub url: Url, - pub kind: SourceKind, - // e.g. 
the exact git revision of the specified branch for a Git Source - pub precise: Option -} - -impl PartialOrd for SourceId { - fn partial_cmp(&self, other: &SourceId) -> Option { - self.to_string().partial_cmp(&other.to_string()) - } -} - -impl Ord for SourceId { - fn cmp(&self, other: &SourceId) -> Ordering { - self.to_string().cmp(&other.to_string()) - } -} - -impl> Encodable for SourceId { - fn encode(&self, s: &mut S) -> Result<(), E> { - if self.is_path() { - s.emit_option_none() - } else { - self.to_url().encode(s) - } - } -} - -impl> Decodable for SourceId { - fn decode(d: &mut D) -> Result { - let string: String = Decodable::decode(d).ok().expect("Invalid encoded SourceId"); - Ok(SourceId::from_url(string)) - } -} - -impl Show for SourceId { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match *self { - SourceId { kind: PathKind, ref url, .. } => url.fmt(f), - SourceId { kind: GitKind(ref reference), ref url, ref precise, .. } => { - try!(write!(f, "{}", url)); - if reference.as_slice() != "master" { - try!(write!(f, "?ref={}", reference)); - } - - match *precise { - Some(ref s) => { - try!(write!(f, "#{}", s.as_slice().slice_to(8))); - } - None => {} - } - Ok(()) - }, - SourceId { kind: RegistryKind, ref url, .. } => { - let default = RegistrySource::url().ok(); - if default.as_ref() == Some(url) { - write!(f, "the package registry") - } else { - write!(f, "registry {}", url) - } - } - } - } + inner: Arc, } -// This custom implementation handles situations such as when two git sources -// point at *almost* the same URL, but not quite, even when they actually point -// to the same repository. -impl PartialEq for SourceId { - fn eq(&self, other: &SourceId) -> bool { - if self.kind != other.kind { return false } - if self.url == other.url { return true } - - match (&self.kind, &other.kind, &self.url, &other.url) { - (&GitKind(ref ref1), &GitKind(ref ref2), u1, u2) => { - ref1 == ref2 && - git::canonicalize_url(u1) == git::canonicalize_url(u2) - } - _ => false, - } - } -} - -impl hash::Hash for SourceId { - fn hash(&self, into: &mut S) { - self.kind.hash(into); - match *self { - SourceId { kind: GitKind(..), ref url, .. } => { - git::canonicalize_url(url).hash(into) - } - _ => self.url.hash(into), - } - } +#[deriving(Eq, Clone)] +struct SourceIdInner { + url: Url, + kind: SourceKind, + // e.g. the exact git revision of the specified branch for a Git Source + precise: Option } impl SourceId { pub fn new(kind: SourceKind, url: Url) -> SourceId { - SourceId { kind: kind, url: url, precise: None } + SourceId { + inner: Arc::new(SourceIdInner { + kind: kind, + url: url, + precise: None, + }), + } } /// Parses a source URL and returns the corresponding ID. @@ -198,12 +121,12 @@ impl SourceId { } pub fn to_url(&self) -> String { - match *self { - SourceId { kind: PathKind, .. } => { + match *self.inner { + SourceIdInner { kind: PathKind, .. } => { fail!("Path sources are not included in the lockfile, \ so this is unimplemented") }, - SourceId { + SourceIdInner { kind: GitKind(ref reference), ref url, ref precise, .. } => { let ref_str = if reference.as_slice() != "master" { @@ -220,7 +143,7 @@ impl SourceId { format!("git+{}{}{}", url, ref_str, precise_str) }, - SourceId { kind: RegistryKind, .. } => { + SourceIdInner { kind: RegistryKind, .. } => { // TODO: Central registry vs. 
alternates "registry+https://crates.io/".to_string() } @@ -255,15 +178,19 @@ impl SourceId { } pub fn get_url(&self) -> &Url { - &self.url + &self.inner.url + } + + pub fn get_kind(&self) -> &SourceKind { + &self.inner.kind } pub fn is_path(&self) -> bool { - self.kind == PathKind + self.inner.kind == PathKind } pub fn is_git(&self) -> bool { - match self.kind { + match self.inner.kind { GitKind(_) => true, _ => false } @@ -272,10 +199,10 @@ impl SourceId { /// Creates an implementation of `Source` corresponding to this ID. pub fn load<'a>(&self, config: &'a mut Config) -> Box { log!(5, "loading SourceId; {}", self); - match self.kind { + match self.inner.kind { GitKind(..) => box GitSource::new(self, config) as Box, PathKind => { - let path = match self.url.to_file_path() { + let path = match self.inner.url.to_file_path() { Ok(p) => p, Err(()) => fail!("path sources cannot be remote"), }; @@ -285,10 +212,112 @@ impl SourceId { } } + pub fn get_precise(&self) -> Option<&str> { + self.inner.precise.as_ref().map(|s| s.as_slice()) + } + pub fn with_precise(&self, v: String) -> SourceId { SourceId { - precise: Some(v), - .. self.clone() + inner: Arc::new(SourceIdInner { + precise: Some(v), + .. (*self.inner).clone() + }), + } + } +} + +impl PartialEq for SourceId { + fn eq(&self, other: &SourceId) -> bool { + self.inner.eq(&*other.inner) + } +} + +impl PartialOrd for SourceId { + fn partial_cmp(&self, other: &SourceId) -> Option { + self.to_string().partial_cmp(&other.to_string()) + } +} + +impl Ord for SourceId { + fn cmp(&self, other: &SourceId) -> Ordering { + self.to_string().cmp(&other.to_string()) + } +} + +impl> Encodable for SourceId { + fn encode(&self, s: &mut S) -> Result<(), E> { + if self.is_path() { + s.emit_option_none() + } else { + self.to_url().encode(s) + } + } +} + +impl> Decodable for SourceId { + fn decode(d: &mut D) -> Result { + let string: String = Decodable::decode(d).ok().expect("Invalid encoded SourceId"); + Ok(SourceId::from_url(string)) + } +} + +impl Show for SourceId { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + match *self.inner { + SourceIdInner { kind: PathKind, ref url, .. } => url.fmt(f), + SourceIdInner { kind: GitKind(ref reference), ref url, + ref precise, .. } => { + try!(write!(f, "{}", url)); + if reference.as_slice() != "master" { + try!(write!(f, "?ref={}", reference)); + } + + match *precise { + Some(ref s) => { + try!(write!(f, "#{}", s.as_slice().slice_to(8))); + } + None => {} + } + Ok(()) + }, + SourceIdInner { kind: RegistryKind, ref url, .. } => { + let default = RegistrySource::url().ok(); + if default.as_ref() == Some(url) { + write!(f, "the package registry") + } else { + write!(f, "registry {}", url) + } + } + } + } +} + +// This custom implementation handles situations such as when two git sources +// point at *almost* the same URL, but not quite, even when they actually point +// to the same repository. +impl PartialEq for SourceIdInner { + fn eq(&self, other: &SourceIdInner) -> bool { + if self.kind != other.kind { return false } + if self.url == other.url { return true } + + match (&self.kind, &other.kind, &self.url, &other.url) { + (&GitKind(ref ref1), &GitKind(ref ref2), u1, u2) => { + ref1 == ref2 && + git::canonicalize_url(u1) == git::canonicalize_url(u2) + } + _ => false, + } + } +} + +impl hash::Hash for SourceId { + fn hash(&self, into: &mut S) { + self.inner.kind.hash(into); + match *self.inner { + SourceIdInner { kind: GitKind(..), ref url, .. 
} => { + git::canonicalize_url(url).hash(into) + } + _ => self.inner.url.hash(into), } } } @@ -420,11 +449,11 @@ mod tests { let s1 = SourceId::new(GitKind("master".to_string()), loc); let loc = "git://github.com/foo/bar".to_url().unwrap(); - let mut s2 = SourceId::new(GitKind("master".to_string()), loc); + let s2 = SourceId::new(GitKind("master".to_string()), loc.clone()); assert_eq!(s1, s2); - s2.kind = GitKind("foo".to_string()); - assert!(s1 != s2); + let s3 = SourceId::new(GitKind("foo".to_string()), loc); + assert!(s1 != s3); } } diff --git a/src/cargo/core/summary.rs b/src/cargo/core/summary.rs index b794f8db427..04abb7eb835 100644 --- a/src/cargo/core/summary.rs +++ b/src/cargo/core/summary.rs @@ -8,7 +8,7 @@ use util::{CargoResult, human}; /// Subset of a `Manifest`. Contains only the most important informations about a package. /// /// Summaries are cloned, and should not be mutated after creation -#[deriving(Show,Clone,PartialEq)] +#[deriving(Show,Clone)] pub struct Summary { package_id: PackageId, dependencies: Vec, @@ -91,6 +91,12 @@ impl Summary { } } +impl PartialEq for Summary { + fn eq(&self, other: &Summary) -> bool { + self.package_id == other.package_id + } +} + pub trait SummaryVec { fn names(&self) -> Vec; } diff --git a/src/cargo/ops/cargo_rustc/fingerprint.rs b/src/cargo/ops/cargo_rustc/fingerprint.rs index 3f6f4e6a316..4027d3a2fbf 100644 --- a/src/cargo/ops/cargo_rustc/fingerprint.rs +++ b/src/cargo/ops/cargo_rustc/fingerprint.rs @@ -54,7 +54,7 @@ pub fn prepare_target(cx: &mut Context, pkg: &Package, target: &Target, // constant (which is the responsibility of the source) let use_pkg = { let doc = target.get_profile().is_doc(); - let path = match pkg.get_summary().get_source_id().kind { + let path = match *pkg.get_summary().get_source_id().get_kind() { PathKind => true, _ => false, }; diff --git a/src/cargo/sources/git/source.rs b/src/cargo/sources/git/source.rs index b68a6e9ef83..8938bac5599 100644 --- a/src/cargo/sources/git/source.rs +++ b/src/cargo/sources/git/source.rs @@ -28,7 +28,7 @@ impl<'a, 'b> GitSource<'a, 'b> { config: &'a mut Config<'b>) -> GitSource<'a, 'b> { assert!(source_id.is_git(), "id is not git, id={}", source_id); - let reference = match source_id.kind { + let reference = match *source_id.get_kind() { GitKind(ref reference) => reference, _ => fail!("Not a git source; id={}", source_id) }; @@ -42,9 +42,9 @@ impl<'a, 'b> GitSource<'a, 'b> { let checkout_path = config.git_checkout_path() .join(ident.as_slice()).join(reference.as_slice()); - let reference = match source_id.precise { - Some(ref s) => s, - None => reference, + let reference = match source_id.get_precise() { + Some(s) => s, + None => reference.as_slice(), }; GitSource { @@ -160,7 +160,7 @@ impl<'a, 'b> Source for GitSource<'a, 'b> { let actual_rev = self.remote.rev_for(&self.db_path, self.reference.as_slice()); let should_update = actual_rev.is_err() || - self.source_id.precise.is_none(); + self.source_id.get_precise().is_none(); let (repo, actual_rev) = if should_update { try!(self.config.shell().status("Updating", diff --git a/src/cargo/util/graph.rs b/src/cargo/util/graph.rs index b83e4a8e965..5473b0b34bc 100644 --- a/src/cargo/util/graph.rs +++ b/src/cargo/util/graph.rs @@ -92,3 +92,9 @@ impl PartialEq for Graph { fn eq(&self, other: &Graph) -> bool { self.nodes.eq(&other.nodes) } } impl Eq for Graph {} + +impl Clone for Graph { + fn clone(&self) -> Graph { + Graph { nodes: self.nodes.clone() } + } +} diff --git a/src/cargo/util/mod.rs b/src/cargo/util/mod.rs index 
--- a/src/cargo/util/mod.rs
+++ b/src/cargo/util/mod.rs
@@ -15,19 +15,19 @@ pub use self::to_semver::ToSemver;
 pub use self::vcs::{GitRepo, HgRepo};
 pub use self::sha256::Sha256;
 
-pub mod graph;
-pub mod process_builder;
 pub mod config;
-pub mod important_paths;
-pub mod result;
-pub mod toml;
-pub mod paths;
 pub mod errors;
+pub mod graph;
 pub mod hex;
+pub mod important_paths;
+pub mod paths;
+pub mod process_builder;
 pub mod profile;
+pub mod result;
 pub mod to_semver;
-mod pool;
+pub mod to_url;
+pub mod toml;
 mod dependency_queue;
-mod to_url;
-mod vcs;
+mod pool;
 mod sha256;
+mod vcs;
diff --git a/tests/resolve.rs b/tests/resolve.rs
new file mode 100644
index 00000000000..30c8c04fca8
--- /dev/null
+++ b/tests/resolve.rs
@@ -0,0 +1,351 @@
+#![feature(macro_rules)]
+
+extern crate hamcrest;
+extern crate cargo;
+
+use std::collections::HashMap;
+
+use hamcrest::{assert_that, equal_to, contains};
+
+use cargo::core::source::{SourceId, RegistryKind, GitKind};
+use cargo::core::{Dependency, PackageId, Summary, Registry};
+use cargo::util::{CargoResult, ToUrl};
+use cargo::core::resolver::{mod, ResolveEverything};
+
+fn resolve<R: Registry>(pkg: PackageId, deps: Vec<Dependency>,
+                        registry: &mut R)
+                        -> CargoResult<Vec<PackageId>> {
+    let summary = Summary::new(pkg, deps, HashMap::new()).unwrap();
+    let method = ResolveEverything;
+    Ok(try!(resolver::resolve(&summary, method, registry)).iter().map(|p| {
+        p.clone()
+    }).collect())
+}
+
+trait ToDep {
+    fn to_dep(self) -> Dependency;
+}
+
+impl ToDep for &'static str {
+    fn to_dep(self) -> Dependency {
+        let url = "http://example.com".to_url().unwrap();
+        let source_id = SourceId::new(RegistryKind, url);
+        Dependency::parse(self, Some("1.0.0"), &source_id).unwrap()
+    }
+}
+
+impl ToDep for Dependency {
+    fn to_dep(self) -> Dependency {
+        self
+    }
+}
+
+trait ToPkgId {
+    fn to_pkgid(&self) -> PackageId;
+}
+
+impl ToPkgId for &'static str {
+    fn to_pkgid(&self) -> PackageId {
+        PackageId::new(*self, "1.0.0", &registry_loc()).unwrap()
+    }
+}
+
+impl ToPkgId for (&'static str, &'static str) {
+    fn to_pkgid(&self) -> PackageId {
+        let (name, vers) = *self;
+        PackageId::new(name, vers, &registry_loc()).unwrap()
+    }
+}
+
+macro_rules! pkg(
+    ($pkgid:expr => [$($deps:expr),+]) => ({
+        let d: Vec<Dependency> = vec![$($deps.to_dep()),+];
+
+        Summary::new($pkgid.to_pkgid(), d, HashMap::new()).unwrap()
+    });
+
+    ($pkgid:expr) => (
+        Summary::new($pkgid.to_pkgid(), Vec::new(), HashMap::new()).unwrap()
+    )
+)
+
+fn registry_loc() -> SourceId {
+    let remote = "http://example.com".to_url().unwrap();
+    SourceId::new(RegistryKind, remote)
+}
+
+fn pkg(name: &str) -> Summary {
+    Summary::new(pkg_id(name), Vec::new(), HashMap::new()).unwrap()
+}
+
+fn pkg_id(name: &str) -> PackageId {
+    PackageId::new(name, "1.0.0", &registry_loc()).unwrap()
+}
+
+fn pkg_id_loc(name: &str, loc: &str) -> PackageId {
+    let remote = loc.to_url();
+    let source_id = SourceId::new(GitKind("master".to_string()),
+                                  remote.unwrap());
+
+    PackageId::new(name, "1.0.0", &source_id).unwrap()
+}
+
+fn pkg_loc(name: &str, loc: &str) -> Summary {
+    Summary::new(pkg_id_loc(name, loc), Vec::new(), HashMap::new()).unwrap()
+}
+
+fn dep(name: &str) -> Dependency { dep_req(name, "1.0.0") }
+fn dep_req(name: &str, req: &str) -> Dependency {
+    let url = "http://example.com".to_url().unwrap();
+    let source_id = SourceId::new(RegistryKind, url);
+    Dependency::parse(name, Some(req), &source_id).unwrap()
+}
+
+fn dep_loc(name: &str, location: &str) -> Dependency {
+    let url = location.to_url().unwrap();
+    let source_id = SourceId::new(GitKind("master".to_string()), url);
+    Dependency::parse(name, Some("1.0.0"), &source_id).unwrap()
+}
+
+fn registry(pkgs: Vec<Summary>) -> Vec<Summary> {
+    pkgs
+}
+
+fn names<P: ToPkgId>(names: &[P]) -> Vec<PackageId> {
+    names.iter().map(|name| name.to_pkgid()).collect()
+}
+
+fn loc_names(names: &[(&'static str, &'static str)]) -> Vec<PackageId> {
+    names.iter()
+         .map(|&(name, loc)| pkg_id_loc(name, loc)).collect()
+}
+
+#[test]
+fn test_resolving_empty_dependency_list() {
+    let res = resolve(pkg_id("root"), Vec::new(),
+                      &mut registry(vec!())).unwrap();
+
+    assert_that(&res, equal_to(&names(["root"])));
+}
+
+#[test]
+fn test_resolving_only_package() {
+    let mut reg = registry(vec!(pkg("foo")));
+    let res = resolve(pkg_id("root"), vec![dep("foo")], &mut reg);
+
+    assert_that(&res.unwrap(), contains(names(["root", "foo"])).exactly());
+}
+
+#[test]
+fn test_resolving_one_dep() {
+    let mut reg = registry(vec!(pkg("foo"), pkg("bar")));
+    let res = resolve(pkg_id("root"), vec![dep("foo")], &mut reg);
+
+    assert_that(&res.unwrap(), contains(names(["root", "foo"])).exactly());
+}
+
+#[test]
+fn test_resolving_multiple_deps() {
+    let mut reg = registry(vec!(pkg!("foo"), pkg!("bar"), pkg!("baz")));
+    let res = resolve(pkg_id("root"), vec![dep("foo"), dep("baz")],
+                      &mut reg).unwrap();
+
+    assert_that(&res, contains(names(["root", "foo", "baz"])).exactly());
+}
+
+#[test]
+fn test_resolving_transitive_deps() {
+    let mut reg = registry(vec!(pkg!("foo"), pkg!("bar" => ["foo"])));
+    let res = resolve(pkg_id("root"), vec![dep("bar")], &mut reg).unwrap();
+
+    assert_that(&res, contains(names(["root", "foo", "bar"])));
+}
+
+#[test]
+fn test_resolving_common_transitive_deps() {
+    let mut reg = registry(vec!(pkg!("foo" => ["bar"]), pkg!("bar")));
+    let res = resolve(pkg_id("root"), vec![dep("foo"), dep("bar")],
+                      &mut reg).unwrap();
+
+    assert_that(&res, contains(names(["root", "foo", "bar"])));
+}
+
+#[test]
+fn test_resolving_with_same_name() {
+    let list = vec![pkg_loc("foo", "http://first.example.com"),
+                    pkg_loc("bar", "http://second.example.com")];
+
+    let mut reg = registry(list);
+    let res = resolve(pkg_id("root"),
+                      vec![dep_loc("foo", "http://first.example.com"),
+                           dep_loc("bar", "http://second.example.com")],
+                      &mut reg);
+
+    let mut names = loc_names([("foo", "http://first.example.com"),
+                               ("bar", "http://second.example.com")]);
+
+    names.push(pkg_id("root"));
+
+    assert_that(&res.unwrap(), contains(names).exactly());
+}
+
+#[test]
+fn test_resolving_with_dev_deps() {
+    let mut reg = registry(vec!(
+        pkg!("foo" => ["bar", dep("baz").transitive(false)]),
+        pkg!("baz" => ["bat", dep("bam").transitive(false)]),
+        pkg!("bar"),
+        pkg!("bat")
+    ));
+
+    let res = resolve(pkg_id("root"),
+                      vec![dep("foo"), dep("baz").transitive(false)],
+                      &mut reg).unwrap();
+
+    assert_that(&res, contains(names(["root", "foo", "bar", "baz"])));
+}
+
+#[test]
+fn resolving_with_many_versions() {
+    let mut reg = registry(vec!(
+        pkg!(("foo", "1.0.1")),
+        pkg!(("foo", "1.0.2")),
+    ));
+
+    let res = resolve(pkg_id("root"), vec![dep("foo")], &mut reg).unwrap();
+
+    assert_that(&res, contains(names([("root", "1.0.0"),
+                                      ("foo", "1.0.2")])));
+}
+
+#[test]
+fn resolving_with_specific_version() {
+    let mut reg = registry(vec!(
+        pkg!(("foo", "1.0.1")),
+        pkg!(("foo", "1.0.2")),
+    ));
+
+    let res = resolve(pkg_id("root"), vec![dep_req("foo", "=1.0.1")],
+                      &mut reg).unwrap();
+
+    assert_that(&res, contains(names([("root", "1.0.0"),
+                                      ("foo", "1.0.1")])));
+}
+
+#[test]
+fn resolving_incompat_versions() {
+    let mut reg = registry(vec!(
+        pkg!(("foo", "1.0.1")),
+        pkg!(("foo", "1.0.2")),
+        pkg!("bar" => [dep_req("foo", "=1.0.2")]),
+    ));
+
+    assert!(resolve(pkg_id("root"), vec![
+        dep_req("foo", "=1.0.1"),
+        dep("bar"),
+    ], &mut reg).is_err());
+}
+
+#[test]
+fn resolving_backtrack() {
+    let mut reg = registry(vec!(
+        pkg!(("foo", "1.0.2") => [dep("bar")]),
+        pkg!(("foo", "1.0.1") => [dep("baz")]),
+        pkg!("bar" => [dep_req("foo", "=2.0.2")]),
+        pkg!("baz"),
+    ));
+
+    let res = resolve(pkg_id("root"), vec![
+        dep_req("foo", "^1"),
+    ], &mut reg).unwrap();
+
+    assert_that(&res, contains(names([("root", "1.0.0"),
+                                      ("foo", "1.0.1"),
+                                      ("baz", "1.0.0")])));
+}
+
+#[test]
+fn resolving_allows_multiple_compatible_versions() {
+    let mut reg = registry(vec!(
+        pkg!(("foo", "1.0.0")),
+        pkg!(("foo", "2.0.0")),
+        pkg!(("foo", "0.1.0")),
+        pkg!(("foo", "0.2.0")),
+
+        pkg!("bar" => ["d1", "d2", "d3", "d4"]),
+        pkg!("d1" => [dep_req("foo", "1")]),
+        pkg!("d2" => [dep_req("foo", "2")]),
+        pkg!("d3" => [dep_req("foo", "0.1")]),
+        pkg!("d4" => [dep_req("foo", "0.2")]),
+    ));
+
+    let res = resolve(pkg_id("root"), vec![
+        dep("bar"),
+    ], &mut reg).unwrap();
+
+    assert_that(&res, contains(names([("root", "1.0.0"),
+                                      ("foo", "1.0.0"),
+                                      ("foo", "2.0.0"),
+                                      ("foo", "0.1.0"),
+                                      ("foo", "0.2.0"),
+                                      ("d1", "1.0.0"),
+                                      ("d2", "1.0.0"),
+                                      ("d3", "1.0.0"),
+                                      ("d4", "1.0.0"),
+                                      ("bar", "1.0.0")])));
+}
+
+#[test]
+fn resolving_with_deep_backtracking() {
+    let mut reg = registry(vec!(
+        pkg!(("foo", "1.0.1") => [dep_req("bar", "1")]),
+        pkg!(("foo", "1.0.0") => [dep_req("bar", "2")]),
+
+        pkg!(("bar", "1.0.0") => [dep_req("baz", "=1.0.2"),
+                                  dep_req("other", "1")]),
+        pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]),
+
+        pkg!(("baz", "1.0.2") => [dep_req("other", "2")]),
+        pkg!(("baz", "1.0.1")),
+
+        pkg!(("dep_req", "1.0.0")),
+        pkg!(("dep_req", "2.0.0")),
+    ));
+
+    let res = resolve(pkg_id("root"), vec![
+        dep_req("foo", "1"),
+    ], &mut reg).unwrap();
+
+    assert_that(&res, contains(names([("root", "1.0.0"),
+                                      ("foo", "1.0.0"),
+                                      ("bar", "2.0.0"),
+                                      ("baz", "1.0.1")])));
+}
+
+#[test]
+fn resolving_but_no_exists() {
+    let mut reg = registry(vec!(
+    ));
+
+    let res = resolve(pkg_id("root"), vec![
+        dep_req("foo", "1"),
+    ], &mut reg);
+    assert!(res.is_err());
+
+    assert_eq!(res.to_string().as_slice(), "Err(\
+no package named `foo` found (required by `root`)
+location searched: registry http://example.com/
+version required: ^1\
+)");
+}
+
+#[test]
+fn resolving_cycle() {
+    let mut reg = registry(vec!(
+        pkg!("foo" => ["foo"]),
+    ));
+
+    let _ = resolve(pkg_id("root"), vec![
+        dep_req("foo", "1"),
+    ], &mut reg);
+}
diff --git a/tests/test_cargo_compile.rs b/tests/test_cargo_compile.rs
index 665e91e8494..d55b2e0afb9 100644
--- a/tests/test_cargo_compile.rs
+++ b/tests/test_cargo_compile.rs
@@ -501,9 +501,9 @@ test!(cargo_compile_with_dep_name_mismatch {
     assert_that(p.cargo_process("build"),
                 execs().with_status(101).with_stderr(format!(
-r#"No package named `notquitebar` found (required by `foo`).
-Location searched: {proj_dir}
-Version required: *
+r#"no package named `notquitebar` found (required by `foo`)
+location searched: {proj_dir}
+version required: *
 "#, proj_dir = p.url())));
 })
 
@@ -1091,9 +1091,7 @@ test!(self_dependency {
     "#)
         .file("src/test.rs", "fn main() {}");
     assert_that(p.cargo_process("build"),
-                execs().with_status(0).with_stdout("\
-[..] test v0.0.0 ([..])
-"));
+                execs().with_status(0));
 })
 
 #[cfg(not(windows))]
diff --git a/tests/test_cargo_registry.rs b/tests/test_cargo_registry.rs
index 944c2557219..2c598c15360 100644
--- a/tests/test_cargo_registry.rs
+++ b/tests/test_cargo_registry.rs
@@ -150,9 +150,9 @@ test!(nonexistent {
     assert_that(p.cargo_process("build"),
                 execs().with_status(101).with_stderr("\
-No package named `nonexistent` found (required by `foo`).
-Location searched: the package registry
-Version required: >= 0.0.0
+no package named `nonexistent` found (required by `foo`)
+location searched: the package registry
+version required: >= 0.0.0
 "));
 })
 
@@ -196,9 +196,9 @@ test!(update_registry {
     assert_that(p.cargo_process("build"),
                 execs().with_status(101).with_stderr("\
-No package named `notyet` found (required by `foo`).
-Location searched: the package registry
-Version required: >= 0.0.0
+no package named `notyet` found (required by `foo`)
+location searched: the package registry
+version required: >= 0.0.0
 "));
 
     // Add the package and commit