From 690ce845f726424b8aeee178dcfec32eeeabdc2d Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Mon, 29 Feb 2016 22:13:11 -0800 Subject: [PATCH 1/7] Remove usage of PathSource::for_path This does much more I/O than Package::for_path and this is also on its way out. --- src/bin/read_manifest.rs | 8 ++------ src/cargo/ops/cargo_output_metadata.rs | 16 +++++----------- src/cargo/ops/cargo_package.rs | 5 +++-- 3 files changed, 10 insertions(+), 19 deletions(-) diff --git a/src/bin/read_manifest.rs b/src/bin/read_manifest.rs index 141ebfff9aa..1cbb0cff8c4 100644 --- a/src/bin/read_manifest.rs +++ b/src/bin/read_manifest.rs @@ -1,9 +1,8 @@ use std::env; -use cargo::core::{Package, Source}; +use cargo::core::Package; use cargo::util::{CliResult, Config}; use cargo::util::important_paths::{find_root_manifest_for_wd}; -use cargo::sources::{PathSource}; #[derive(RustcDecodable)] pub struct Options { @@ -32,9 +31,6 @@ pub fn execute(options: Options, config: &Config) -> CliResult> let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); - let mut source = try!(PathSource::for_path(root.parent().unwrap(), config)); - try!(source.update()); - - let pkg = try!(source.root_package()); + let pkg = try!(Package::for_path(&root, config)); Ok(Some(pkg)) } diff --git a/src/cargo/ops/cargo_output_metadata.rs b/src/cargo/ops/cargo_output_metadata.rs index 7a037828d03..79b308053d4 100644 --- a/src/cargo/ops/cargo_output_metadata.rs +++ b/src/cargo/ops/cargo_output_metadata.rs @@ -3,9 +3,8 @@ use std::path::Path; use rustc_serialize::{Encodable, Encoder}; use core::resolver::Resolve; -use core::{Source, Package, PackageId, PackageSet}; +use core::{Package, PackageId, PackageSet}; use ops; -use sources::PathSource; use util::config::Config; use util::CargoResult; @@ -35,10 +34,9 @@ pub fn output_metadata(opt: OutputMetadataOptions, config: &Config) -> CargoResu } fn metadata_no_deps(opt: OutputMetadataOptions, config: &Config) -> CargoResult { - let mut source 
= try!(PathSource::for_path(opt.manifest_path.parent().unwrap(), config)); - + let root = try!(Package::for_path(opt.manifest_path, config)); Ok(ExportInfo { - packages: vec![try!(source.root_package())], + packages: vec![root], resolve: None, version: VERSION, }) @@ -112,14 +110,10 @@ fn resolve_dependencies<'a>(manifest: &Path, features: Vec, no_default_features: bool) -> CargoResult<(PackageSet<'a>, Resolve)> { - let mut source = try!(PathSource::for_path(manifest.parent().unwrap(), config)); - try!(source.update()); - - let package = try!(source.root_package()); - + let package = try!(Package::for_path(manifest, config)); ops::resolve_dependencies(&package, config, - Some(Box::new(source)), + None, features, no_default_features) } diff --git a/src/cargo/ops/cargo_package.rs b/src/cargo/ops/cargo_package.rs index 3ab236426b2..bf173f1642c 100644 --- a/src/cargo/ops/cargo_package.rs +++ b/src/cargo/ops/cargo_package.rs @@ -16,8 +16,9 @@ pub fn package(manifest_path: &Path, verify: bool, list: bool, metadata: bool) -> CargoResult> { - let mut src = try!(PathSource::for_path(manifest_path.parent().unwrap(), - config)); + let path = manifest_path.parent().unwrap(); + let id = try!(SourceId::for_path(path)); + let mut src = PathSource::new(path, &id, config); let pkg = try!(src.root_package()); if metadata { From e56965fb747d40c32e085f63b249cf0202c1ddd3 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Mon, 29 Feb 2016 22:17:28 -0800 Subject: [PATCH 2/7] Remove hacks when hashing package ids Right now there's a few hacks here and there to "correctly" hash package ids by taking a package's root path into account instead of the path stored in the package id. The purpose of this was to solve issues where the same package referenced from two locations ended up having two different hashes. This hack leaked, however, into the implementation of fingerprints which in turn ended up causing spurious rebuilds. 
Fix this problem once and for all by just defining hashing on package ids the natural and expected way. --- src/cargo/core/package.rs | 12 ++------ src/cargo/core/package_id.rs | 10 ++----- src/cargo/sources/git/source.rs | 5 ++-- src/cargo/sources/path.rs | 51 ++++++++++++++++----------------- src/cargo/util/toml.rs | 29 +++++++++++++++---- 5 files changed, 55 insertions(+), 52 deletions(-) diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs index c3ec0dd6183..c8362ffa286 100644 --- a/src/cargo/core/package.rs +++ b/src/cargo/core/package.rs @@ -87,7 +87,7 @@ impl Package { } pub fn generate_metadata(&self) -> Metadata { - self.package_id().generate_metadata(self.root()) + self.package_id().generate_metadata() } } @@ -107,15 +107,7 @@ impl Eq for Package {} impl hash::Hash for Package { fn hash(&self, into: &mut H) { - // We want to be sure that a path-based package showing up at the same - // location always has the same hash. To that effect we don't hash the - // vanilla package ID if we're a path, but instead feed in our own root - // path. 
- if self.package_id().source_id().is_path() { - (0, self.root(), self.name(), self.package_id().version()).hash(into) - } else { - (1, self.package_id()).hash(into) - } + self.package_id().hash(into) } } diff --git a/src/cargo/core/package_id.rs b/src/cargo/core/package_id.rs index 25bfc2796ad..0d968aeeef0 100644 --- a/src/cargo/core/package_id.rs +++ b/src/cargo/core/package_id.rs @@ -3,7 +3,6 @@ use std::error::Error; use std::fmt::{self, Formatter}; use std::hash::Hash; use std::hash; -use std::path::Path; use std::sync::Arc; use regex::Regex; @@ -136,13 +135,8 @@ impl PackageId { pub fn version(&self) -> &semver::Version { &self.inner.version } pub fn source_id(&self) -> &SourceId { &self.inner.source_id } - pub fn generate_metadata(&self, source_root: &Path) -> Metadata { - // See comments in Package::hash for why we have this test - let metadata = if self.inner.source_id.is_path() { - short_hash(&(0, &self.inner.name, &self.inner.version, source_root)) - } else { - short_hash(&(1, self)) - }; + pub fn generate_metadata(&self) -> Metadata { + let metadata = short_hash(self); let extra_filename = format!("-{}", metadata); Metadata { metadata: metadata, extra_filename: extra_filename } diff --git a/src/cargo/sources/git/source.rs b/src/cargo/sources/git/source.rs index 0cdb02f25d3..95004aa7e5d 100644 --- a/src/cargo/sources/git/source.rs +++ b/src/cargo/sources/git/source.rs @@ -179,8 +179,9 @@ impl<'cfg> Source for GitSource<'cfg> { try!(repo.copy_to(actual_rev.clone(), &self.checkout_path)); let source_id = self.source_id.with_precise(Some(actual_rev.to_string())); - let path_source = PathSource::new(&self.checkout_path, &source_id, - self.config); + let path_source = PathSource::new_recursive(&self.checkout_path, + &source_id, + self.config); self.path_source = Some(path_source); self.rev = Some(actual_rev); diff --git a/src/cargo/sources/path.rs b/src/cargo/sources/path.rs index 26373c7ba7e..e354f7a06ce 100644 --- a/src/cargo/sources/path.rs +++ 
b/src/cargo/sources/path.rs @@ -18,30 +18,39 @@ pub struct PathSource<'cfg> { updated: bool, packages: Vec, config: &'cfg Config, + recursive: bool, } -// TODO: Figure out if packages should be discovered in new or self should be -// mut and packages are discovered in update impl<'cfg> PathSource<'cfg> { - pub fn for_path(path: &Path, config: &'cfg Config) - -> CargoResult> { - trace!("PathSource::for_path; path={}", path.display()); - Ok(PathSource::new(path, &try!(SourceId::for_path(path)), config)) - } - /// Invoked with an absolute path to a directory that contains a Cargo.toml. - /// The source will read the manifest and find any other packages contained - /// in the directory structure reachable by the root manifest. + /// + /// This source will only return the package at precisely the `path` + /// specified, and it will be an error if there's not a package at `path`. pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) -> PathSource<'cfg> { - trace!("new; id={}", id); - PathSource { id: id.clone(), path: path.to_path_buf(), updated: false, packages: Vec::new(), config: config, + recursive: false, + } + } + + /// Creates a new source which is walked recursively to discover packages. + /// + /// This is similar to the `new` method except that instead of requiring a + /// valid package to be present at `root` the folder is walked entirely to + /// crawl for packages. + /// + /// Note that this should be used with care and likely shouldn't be chosen + /// by default! + pub fn new_recursive(root: &Path, id: &SourceId, config: &'cfg Config) + -> PathSource<'cfg> { + PathSource { + recursive: true, + .. 
PathSource::new(root, id, config) } } @@ -59,25 +68,13 @@ impl<'cfg> PathSource<'cfg> { pub fn read_packages(&self) -> CargoResult> { if self.updated { Ok(self.packages.clone()) - } else if (self.id.is_path() && self.id.precise().is_some()) || - self.id.is_registry() { - // If our source id is a path and it's listed with a precise - // version, then it means that we're not allowed to have nested - // dependencies (they've been rewritten to crates.io dependencies). - // - // If our source id is a registry dependency then crates are - // published one at a time so we don't recurse as well. Note that - // cargo by default doesn't package up nested dependencies but it - // may do so for custom-crafted tarballs. - // - // In these cases we specifically read just one package, not a list - // of packages. + } else if self.recursive { + ops::read_packages(&self.path, &self.id, self.config) + } else { let path = self.path.join("Cargo.toml"); let (pkg, _) = try!(ops::read_package(&path, &self.id, self.config)); Ok(vec![pkg]) - } else { - ops::read_packages(&self.path, &self.id, self.config) } } diff --git a/src/cargo/util/toml.rs b/src/cargo/util/toml.rs index 7ff7251b2e6..ccbb814ab41 100644 --- a/src/cargo/util/toml.rs +++ b/src/cargo/util/toml.rs @@ -295,6 +295,7 @@ struct Context<'a, 'b> { config: &'b Config, warnings: &'a mut Vec, platform: Option, + layout: &'a Layout, } // These functions produce the equivalent of specific manifest entries. 
One @@ -385,7 +386,7 @@ impl TomlManifest { } let pkgid = try!(project.to_package_id(source_id)); - let metadata = pkgid.generate_metadata(&layout.root); + let metadata = pkgid.generate_metadata(); // If we have no lib at all, use the inferred lib if available // If we have a lib with a path, we're done @@ -516,6 +517,7 @@ impl TomlManifest { config: config, warnings: &mut warnings, platform: None, + layout: &layout, }; // Collect the deps @@ -702,10 +704,27 @@ fn process_dependencies(cx: &mut Context, Some(SourceId::for_git(&loc, reference)) } None => { - details.path.as_ref().map(|path| { - cx.nested_paths.push(PathBuf::from(path)); - cx.source_id.clone() - }) + match details.path.as_ref() { + Some(path) => { + cx.nested_paths.push(PathBuf::from(path)); + // If the source id for the package we're parsing is a + // path source, then we normalize the path here to get + // rid of components like `..`. + // + // The purpose of this is to get a canonical id for the + // package that we're depending on to ensure that builds + // of this package always end up hashing to the same + // value no matter where it's built from. + if cx.source_id.is_path() { + let path = cx.layout.root.join(path); + let path = util::normalize_path(&path); + Some(try!(SourceId::for_path(&path))) + } else { + Some(cx.source_id.clone()) + } + } + None => None, + } } }.unwrap_or(try!(SourceId::for_central(cx.config))); From a71e57438c70f851c4b2bb5ff529c620133d5e7e Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Mon, 29 Feb 2016 22:19:24 -0800 Subject: [PATCH 3/7] Ensure overrides use recursive path sources This mirrors the behavior that they have today. The `load` method for path sources will by default return a non-recursive `PathSource` which unfortunately isn't what we want here. 
--- src/cargo/core/registry.rs | 8 +++----- src/cargo/ops/cargo_compile.rs | 37 +++++++++++++++++----------------- 2 files changed, 22 insertions(+), 23 deletions(-) diff --git a/src/cargo/core/registry.rs b/src/cargo/core/registry.rs index f744f4937be..c005703860a 100644 --- a/src/cargo/core/registry.rs +++ b/src/cargo/core/registry.rs @@ -143,11 +143,9 @@ impl<'cfg> PackageRegistry<'cfg> { self.source_ids.insert(id.clone(), (id.clone(), kind)); } - pub fn add_overrides(&mut self, ids: Vec) -> CargoResult<()> { - for id in ids.iter() { - try!(self.load(id, Kind::Override)); - } - Ok(()) + pub fn add_override(&mut self, id: &SourceId, source: Box) { + self.add_source(id, source, Kind::Override); + self.overrides.push(id.clone()); } pub fn register_lock(&mut self, id: PackageId, deps: Vec) { diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs index a4ef08528be..e24e6f2cf2c 100644 --- a/src/cargo/ops/cargo_compile.rs +++ b/src/cargo/ops/cargo_compile.rs @@ -32,8 +32,9 @@ use core::{Source, SourceId, PackageSet, Package, Target}; use core::{Profile, TargetKind, Profiles}; use core::resolver::{Method, Resolve}; use ops::{self, BuildOutput, ExecEngine}; +use sources::PathSource; use util::config::Config; -use util::{CargoResult, internal, ChainError, profile}; +use util::{CargoResult, profile}; /// Contains information about how a package should be compiled. pub struct CompileOptions<'a> { @@ -104,8 +105,6 @@ pub fn resolve_dependencies<'a>(root_package: &Package, no_default_features: bool) -> CargoResult<(PackageSet<'a>, Resolve)> { - let override_ids = try!(source_ids_from_config(config, root_package.root())); - let mut registry = PackageRegistry::new(config); if let Some(source) = source { @@ -121,7 +120,7 @@ pub fn resolve_dependencies<'a>(root_package: &Package, // overrides, etc. 
let _p = profile::start("resolving w/ overrides..."); - try!(registry.add_overrides(override_ids)); + try!(add_overrides(&mut registry, root_package.root(), config)); let method = Method::Required{ dev_deps: true, // TODO: remove this option? @@ -383,20 +382,14 @@ fn generate_targets<'a>(pkg: &'a Package, /// Read the `paths` configuration variable to discover all path overrides that /// have been configured. -fn source_ids_from_config(config: &Config, cur_path: &Path) - -> CargoResult> { - - let configs = try!(config.values()); - debug!("loaded config; configs={:?}", configs); - let config_paths = match configs.get("paths") { - Some(cfg) => cfg, - None => return Ok(Vec::new()) +fn add_overrides<'a>(registry: &mut PackageRegistry<'a>, + cur_path: &Path, + config: &'a Config) -> CargoResult<()> { + let paths = match try!(config.get_list("paths")) { + Some(list) => list, + None => return Ok(()) }; - let paths = try!(config_paths.list().chain_error(|| { - internal("invalid configuration for the key `paths`") - })); - - paths.iter().map(|&(ref s, ref p)| { + let paths = paths.val.iter().map(|&(ref s, ref p)| { // The path listed next to the string is the config file in which the // key was located, so we want to pop off the `.cargo/config` component // to get the directory containing the `.cargo` folder. @@ -405,7 +398,15 @@ fn source_ids_from_config(config: &Config, cur_path: &Path) // Make sure we don't override the local package, even if it's in the // list of override paths. cur_path != &**p - }).map(|p| SourceId::for_path(&p)).collect() + }); + + for path in paths { + let id = try!(SourceId::for_path(&path)); + let mut source = PathSource::new_recursive(&path, &id, config); + try!(source.update()); + registry.add_override(&id, Box::new(source)); + } + Ok(()) } /// Parse all config files to learn about build configuration. 
Currently From f28c7872e66ec55540ae8e86a605a44bf543c084 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Mon, 29 Feb 2016 22:20:47 -0800 Subject: [PATCH 4/7] Fix decoding lock files with path dependencies With the previous changes a path dependency must have the precise path to it listed in its package id. Currently when decoding a lockfile, however, all path dependencies have the same package id, which unfortunately causes a mismatch. This commit alters the decoding of a lockfile to perform some simple path traversals to probe the filesystem to understand where path dependencies are and set the right package id for the found packages. --- src/cargo/core/resolver/encode.rs | 123 ++++++++++++++--------- src/cargo/ops/cargo_compile.rs | 2 +- src/cargo/ops/cargo_fetch.rs | 2 +- src/cargo/ops/cargo_generate_lockfile.rs | 3 +- src/cargo/ops/cargo_pkgid.rs | 3 +- src/cargo/ops/cargo_read_manifest.rs | 10 +- src/cargo/ops/lockfile.rs | 15 +-- src/cargo/ops/resolve.rs | 8 +- 8 files changed, 97 insertions(+), 69 deletions(-) diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs index 9ef4a4ff46f..93d359655c8 100644 --- a/src/cargo/core/resolver/encode.rs +++ b/src/cargo/core/resolver/encode.rs @@ -3,8 +3,8 @@ use std::collections::{HashMap, BTreeMap}; use regex::Regex; use rustc_serialize::{Encodable, Encoder, Decodable, Decoder}; -use core::{PackageId, SourceId}; -use util::{CargoResult, Graph}; +use core::{Package, PackageId, SourceId}; +use util::{CargoResult, Graph, Config}; use super::Resolve; @@ -18,66 +18,113 @@ pub struct EncodableResolve { pub type Metadata = BTreeMap; impl EncodableResolve { - pub fn to_resolve(&self, default: &SourceId) -> CargoResult { + pub fn to_resolve(&self, root: &Package, config: &Config) + -> CargoResult { + let mut path_deps = HashMap::new(); + try!(build_path_deps(root, &mut path_deps, config)); + let default = root.package_id().source_id(); + let mut g = Graph::new(); let mut tmp = HashMap::new(); let 
packages = Vec::new(); let packages = self.package.as_ref().unwrap_or(&packages); + let root = try!(to_package_id(&self.root.name, + &self.root.version, + self.root.source.as_ref(), + default, &path_deps)); + let ids = try!(packages.iter().map(|p| { + to_package_id(&p.name, &p.version, p.source.as_ref(), + default, &path_deps) + }).collect::>>()); + { - let mut register_pkg = |pkg: &EncodableDependency| - -> CargoResult<()> { - let pkgid = try!(pkg.to_package_id(default)); + let mut register_pkg = |pkgid: &PackageId| { let precise = pkgid.source_id().precise() .map(|s| s.to_string()); assert!(tmp.insert(pkgid.clone(), precise).is_none(), "a package was referenced twice in the lockfile"); - g.add(try!(pkg.to_package_id(default)), &[]); - Ok(()) + g.add(pkgid.clone(), &[]); }; - try!(register_pkg(&self.root)); - for pkg in packages.iter() { - try!(register_pkg(pkg)); + register_pkg(&root); + for id in ids.iter() { + register_pkg(id); } } { - let mut add_dependencies = |pkg: &EncodableDependency| + let mut add_dependencies = |id: &PackageId, pkg: &EncodableDependency| -> CargoResult<()> { - let package_id = try!(pkg.to_package_id(default)); - let deps = match pkg.dependencies { Some(ref deps) => deps, None => return Ok(()), }; for edge in deps.iter() { - let to_depend_on = try!(edge.to_package_id(default)); + let to_depend_on = try!(to_package_id(&edge.name, + &edge.version, + edge.source.as_ref(), + default, + &path_deps)); let precise_pkgid = tmp.get(&to_depend_on) .map(|p| to_depend_on.with_precise(p.clone())) .unwrap_or(to_depend_on.clone()); - g.link(package_id.clone(), precise_pkgid); + g.link(id.clone(), precise_pkgid); } Ok(()) }; - try!(add_dependencies(&self.root)); - for pkg in packages.iter() { - try!(add_dependencies(pkg)); + try!(add_dependencies(&root, &self.root)); + for (id, pkg) in ids.iter().zip(packages) { + try!(add_dependencies(id, pkg)); } } Ok(Resolve { graph: g, - root: try!(self.root.to_package_id(default)), + root: root, features: 
HashMap::new(), metadata: self.metadata.clone(), }) } } +fn build_path_deps(root: &Package, + map: &mut HashMap, + config: &Config) + -> CargoResult<()> { + assert!(root.package_id().source_id().is_path()); + + let deps = root.dependencies() + .iter() + .map(|d| d.source_id()) + .filter(|id| id.is_path()) + .filter_map(|id| id.url().to_file_path().ok()) + .map(|path| path.join("Cargo.toml")) + .filter_map(|path| Package::for_path(&path, config).ok()); + for pkg in deps { + let source_id = pkg.package_id().source_id(); + if map.insert(pkg.name().to_string(), source_id.clone()).is_none() { + try!(build_path_deps(&pkg, map, config)); + } + } + + Ok(()) +} + +fn to_package_id(name: &str, + version: &str, + source: Option<&SourceId>, + default_source: &SourceId, + path_sources: &HashMap) + -> CargoResult { + let source = source.or(path_sources.get(name)).unwrap_or(default_source); + PackageId::new(name, version, source) +} + + #[derive(RustcEncodable, RustcDecodable, Debug, PartialOrd, Ord, PartialEq, Eq)] pub struct EncodableDependency { name: String, @@ -86,15 +133,6 @@ pub struct EncodableDependency { dependencies: Option> } -impl EncodableDependency { - fn to_package_id(&self, default_source: &SourceId) -> CargoResult { - PackageId::new( - &self.name, - &self.version, - self.source.as_ref().unwrap_or(default_source)) - } -} - #[derive(Debug, PartialOrd, Ord, PartialEq, Eq)] pub struct EncodablePackageId { name: String, @@ -134,15 +172,6 @@ impl Decodable for EncodablePackageId { } } -impl EncodablePackageId { - fn to_package_id(&self, default_source: &SourceId) -> CargoResult { - PackageId::new( - &self.name, - &self.version, - self.source.as_ref().unwrap_or(default_source)) - } -} - impl Encodable for Resolve { fn encode(&self, s: &mut S) -> Result<(), S::Error> { let mut ids: Vec<&PackageId> = self.graph.iter().collect(); @@ -151,28 +180,26 @@ impl Encodable for Resolve { let encodable = ids.iter().filter_map(|&id| { if self.root == *id { return None; } - 
Some(encodable_resolve_node(id, &self.root, &self.graph)) + Some(encodable_resolve_node(id, &self.graph)) }).collect::>(); EncodableResolve { package: Some(encodable), - root: encodable_resolve_node(&self.root, &self.root, &self.graph), + root: encodable_resolve_node(&self.root, &self.graph), metadata: self.metadata.clone(), }.encode(s) } } -fn encodable_resolve_node(id: &PackageId, root: &PackageId, - graph: &Graph) -> EncodableDependency { +fn encodable_resolve_node(id: &PackageId, graph: &Graph) + -> EncodableDependency { let deps = graph.edges(id).map(|edge| { - let mut deps = edge.map(|e| { - encodable_package_id(e, root) - }).collect::>(); + let mut deps = edge.map(encodable_package_id).collect::>(); deps.sort(); deps }); - let source = if id.source_id() == root.source_id() { + let source = if id.source_id().is_path() { None } else { Some(id.source_id().clone()) @@ -186,8 +213,8 @@ fn encodable_resolve_node(id: &PackageId, root: &PackageId, } } -fn encodable_package_id(id: &PackageId, root: &PackageId) -> EncodablePackageId { - let source = if id.source_id() == root.source_id() { +fn encodable_package_id(id: &PackageId) -> EncodablePackageId { + let source = if id.source_id().is_path() { None } else { Some(id.source_id().with_precise(None)) diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs index e24e6f2cf2c..1ac22988581 100644 --- a/src/cargo/ops/cargo_compile.rs +++ b/src/cargo/ops/cargo_compile.rs @@ -113,7 +113,7 @@ pub fn resolve_dependencies<'a>(root_package: &Package, // First, resolve the root_package's *listed* dependencies, as well as // downloading and updating all remotes and such. - let resolve = try!(ops::resolve_pkg(&mut registry, root_package)); + let resolve = try!(ops::resolve_pkg(&mut registry, root_package, config)); // Second, resolve with precisely what we're doing. 
Filter out // transitive dependencies if necessary, specify features, handle diff --git a/src/cargo/ops/cargo_fetch.rs b/src/cargo/ops/cargo_fetch.rs index 0617a68bfb0..e8f401d5333 100644 --- a/src/cargo/ops/cargo_fetch.rs +++ b/src/cargo/ops/cargo_fetch.rs @@ -11,7 +11,7 @@ pub fn fetch<'a>(manifest_path: &Path, -> CargoResult<(Resolve, PackageSet<'a>)> { let package = try!(Package::for_path(manifest_path, config)); let mut registry = PackageRegistry::new(config); - let resolve = try!(ops::resolve_pkg(&mut registry, &package)); + let resolve = try!(ops::resolve_pkg(&mut registry, &package, config)); let packages = get_resolved_packages(&resolve, registry); for id in resolve.iter() { try!(packages.get(id)); diff --git a/src/cargo/ops/cargo_generate_lockfile.rs b/src/cargo/ops/cargo_generate_lockfile.rs index fc2cdf4301f..1b8bb0087f0 100644 --- a/src/cargo/ops/cargo_generate_lockfile.rs +++ b/src/cargo/ops/cargo_generate_lockfile.rs @@ -31,7 +31,8 @@ pub fn update_lockfile(manifest_path: &Path, opts: &UpdateOptions) -> CargoResult<()> { let package = try!(Package::for_path(manifest_path, opts.config)); - let previous_resolve = match try!(ops::load_pkg_lockfile(&package)) { + let previous_resolve = match try!(ops::load_pkg_lockfile(&package, + opts.config)) { Some(resolve) => resolve, None => bail!("a Cargo.lock must exist before it is updated") }; diff --git a/src/cargo/ops/cargo_pkgid.rs b/src/cargo/ops/cargo_pkgid.rs index 1d31a3015bd..2bf32e627f4 100644 --- a/src/cargo/ops/cargo_pkgid.rs +++ b/src/cargo/ops/cargo_pkgid.rs @@ -10,8 +10,7 @@ pub fn pkgid(manifest_path: &Path, let package = try!(Package::for_path(manifest_path, config)); let lockfile = package.root().join("Cargo.lock"); - let source_id = package.package_id().source_id(); - let resolve = match try!(ops::load_lockfile(&lockfile, source_id)) { + let resolve = match try!(ops::load_lockfile(&lockfile, &package, config)) { Some(resolve) => resolve, None => bail!("a Cargo.lock must exist for this 
command"), }; diff --git a/src/cargo/ops/cargo_read_manifest.rs b/src/cargo/ops/cargo_read_manifest.rs index 319bf80f210..20a672e2867 100644 --- a/src/cargo/ops/cargo_read_manifest.rs +++ b/src/cargo/ops/cargo_read_manifest.rs @@ -1,11 +1,11 @@ use std::collections::{HashMap, HashSet}; -use std::fs::{self, File}; +use std::fs; use std::io::prelude::*; use std::io; use std::path::{Path, PathBuf}; use core::{Package, Manifest, SourceId, PackageId}; -use util::{self, CargoResult, human, Config, ChainError}; +use util::{self, paths, CargoResult, human, Config, ChainError}; use util::important_paths::find_project_manifest_exact; use util::toml::{Layout, project_layout}; @@ -22,13 +22,11 @@ pub fn read_manifest(contents: &[u8], layout: Layout, source_id: &SourceId, pub fn read_package(path: &Path, source_id: &SourceId, config: &Config) -> CargoResult<(Package, Vec)> { trace!("read_package; path={}; source-id={}", path.display(), source_id); - let mut file = try!(File::open(path)); - let mut data = Vec::new(); - try!(file.read_to_end(&mut data)); + let data = try!(paths::read(path)); let layout = project_layout(path.parent().unwrap()); let (manifest, nested) = - try!(read_manifest(&data, layout, source_id, config)); + try!(read_manifest(data.as_bytes(), layout, source_id, config)); Ok((Package::new(manifest, path), nested)) } diff --git a/src/cargo/ops/lockfile.rs b/src/cargo/ops/lockfile.rs index 8f9489c14f2..e959203726f 100644 --- a/src/cargo/ops/lockfile.rs +++ b/src/cargo/ops/lockfile.rs @@ -5,19 +5,20 @@ use std::path::Path; use rustc_serialize::{Encodable, Decodable}; use toml::{self, Encoder, Value}; -use core::{Resolve, resolver, Package, SourceId}; -use util::{CargoResult, ChainError, human, paths}; +use core::{Resolve, resolver, Package}; +use util::{CargoResult, ChainError, human, paths, Config}; use util::toml as cargo_toml; -pub fn load_pkg_lockfile(pkg: &Package) -> CargoResult> { +pub fn load_pkg_lockfile(pkg: &Package, config: &Config) + -> CargoResult> { 
let lockfile = pkg.root().join("Cargo.lock"); - let source_id = pkg.package_id().source_id(); - load_lockfile(&lockfile, source_id).chain_error(|| { + load_lockfile(&lockfile, pkg, config).chain_error(|| { human(format!("failed to parse lock file at: {}", lockfile.display())) }) } -pub fn load_lockfile(path: &Path, sid: &SourceId) -> CargoResult> { +pub fn load_lockfile(path: &Path, pkg: &Package, config: &Config) + -> CargoResult> { // If there is no lockfile, return none. let mut f = match File::open(path) { Ok(f) => f, @@ -30,7 +31,7 @@ pub fn load_lockfile(path: &Path, sid: &SourceId) -> CargoResult let table = toml::Value::Table(try!(cargo_toml::parse(&s, path))); let mut d = toml::Decoder::new(table); let v: resolver::EncodableResolve = try!(Decodable::decode(&mut d)); - Ok(Some(try!(v.to_resolve(sid)))) + Ok(Some(try!(v.to_resolve(pkg, config)))) } pub fn write_pkg_lockfile(pkg: &Package, resolve: &Resolve) -> CargoResult<()> { diff --git a/src/cargo/ops/resolve.rs b/src/cargo/ops/resolve.rs index 3822c801340..f5925a46c50 100644 --- a/src/cargo/ops/resolve.rs +++ b/src/cargo/ops/resolve.rs @@ -4,16 +4,18 @@ use core::{Package, PackageId, SourceId}; use core::registry::PackageRegistry; use core::resolver::{self, Resolve, Method}; use ops; -use util::CargoResult; +use util::{CargoResult, Config}; /// Resolve all dependencies for the specified `package` using the previous /// lockfile as a guide if present. /// /// This function will also write the result of resolution as a new /// lockfile. 
-pub fn resolve_pkg(registry: &mut PackageRegistry, package: &Package) +pub fn resolve_pkg(registry: &mut PackageRegistry, + package: &Package, + config: &Config) -> CargoResult { - let prev = try!(ops::load_pkg_lockfile(package)); + let prev = try!(ops::load_pkg_lockfile(package, config)); let resolve = try!(resolve_with_previous(registry, package, Method::Everything, prev.as_ref(), None)); From 9185445ae19e682fdf622f6cab60f0d047eefd45 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Tue, 1 Mar 2016 08:20:16 -0800 Subject: [PATCH 5/7] Fix some packaging logic in path sources Currently the packaging logic depends on the old recursive nature of path sources for a few points: * Discovery of a git repository of a package. * Filtering out of sibling packages for only including the right set of files. For a non-recursive path source (now essentially the default) we can no longer assume that we have a listing of all packages. Subsequently this logic was tweaked to allow: * Instead of looking for packages at the root of a repo, we instead look for a Cargo.toml at the root of a git repository. * We keep track of all Cargo.toml files found in a repository and prune out all files which appear to be ancestors of that package. --- src/cargo/sources/path.rs | 106 ++++++++++++++++++++++++-------------- 1 file changed, 67 insertions(+), 39 deletions(-) diff --git a/src/cargo/sources/path.rs b/src/cargo/sources/path.rs index e354f7a06ce..1492d99a70b 100644 --- a/src/cargo/sources/path.rs +++ b/src/cargo/sources/path.rs @@ -108,24 +108,39 @@ impl<'cfg> PathSource<'cfg> { } }; - // If this package is a git repository, then we really do want to query - // the git repository as it takes into account items such as .gitignore. - // We're not quite sure where the git repository is, however, so we do a - // bit of a probe. + // If this package is in a git repository, then we really do want to + // query the git repository as it takes into account items such as + // .gitignore. 
We're not quite sure where the git repository is, + // however, so we do a bit of a probe. // - // We check all packages in this source that are ancestors of the - // specified package (including the same package) to see if they're at - // the root of the git repository. This isn't always true, but it'll get - // us there most of the time! - let repo = self.packages.iter() - .map(|pkg| pkg.root()) - .filter(|path| root.starts_with(path)) - .filter_map(|path| git2::Repository::open(&path).ok()) - .next(); - match repo { - Some(repo) => self.list_files_git(pkg, repo, &mut filter), - None => self.list_files_walk(pkg, &mut filter), + // We walk this package's path upwards and look for a sibling + // Cargo.toml and .git folder. If we find one then we assume that we're + // part of that repository. + let mut cur = root; + loop { + if cur.join("Cargo.toml").is_file() { + // If we find a git repository next to this Cargo.toml, we still + // check to see if we are indeed part of the index. If not, then + // this is likely an unrelated git repo, so keep going. + if let Ok(repo) = git2::Repository::open(cur) { + let index = try!(repo.index()); + let path = util::without_prefix(root, cur) + .unwrap().join("Cargo.toml"); + if index.get_path(&path, 0).is_some() { + return self.list_files_git(pkg, repo, &mut filter); + } + } + } + // don't cross submodule boundaries + if cur.join(".git").is_dir() { + break + } + match cur.parent() { + Some(parent) => cur = parent, + None => break, + } } + self.list_files_walk(pkg, &mut filter) } fn list_files_git(&self, pkg: &Package, repo: git2::Repository, @@ -138,7 +153,7 @@ impl<'cfg> PathSource<'cfg> { })); let pkg_path = pkg.root(); - let mut ret = Vec::new(); + let mut ret = Vec::::new(); // We use information from the git repository to guide us in traversing // its tree. 
The primary purpose of this is to take advantage of the @@ -165,32 +180,48 @@ impl<'cfg> PathSource<'cfg> { } }); + let mut subpackages_found = Vec::new(); + 'outer: for (file_path, is_dir) in index_files.chain(untracked) { let file_path = try!(file_path); - // Filter out files outside this package. - if !file_path.starts_with(pkg_path) { continue } - - // Filter out Cargo.lock and target always - { - let fname = file_path.file_name().and_then(|s| s.to_str()); - if fname == Some("Cargo.lock") { continue } - if fname == Some("target") { continue } + // Filter out files blatantly outside this package. This is helped a + // bit obove via the `pathspec` function call, but we need to filter + // the entries in the index as well. + if !file_path.starts_with(pkg_path) { + continue } - // Filter out sub-packages of this package - for other_pkg in self.packages.iter().filter(|p| *p != pkg) { - let other_path = other_pkg.root(); - if other_path.starts_with(pkg_path) && - file_path.starts_with(other_path) { - continue 'outer; + match file_path.file_name().and_then(|s| s.to_str()) { + // Filter out Cargo.lock and target always, we don't want to + // package a lock file no one will ever read and we also avoid + // build artifacts + Some("Cargo.lock") | + Some("target") => continue, + + // Keep track of all sub-packages found and also strip out all + // matches we've found so far. Note, though, that if we find + // our own `Cargo.toml` we keep going. + Some("Cargo.toml") => { + let path = file_path.parent().unwrap(); + if path != pkg_path { + warn!("subpackage found: {}", path.display()); + ret.retain(|p| !p.starts_with(path)); + subpackages_found.push(path.to_path_buf()); + continue + } } + + _ => {} } - let is_dir = is_dir.or_else(|| { - fs::metadata(&file_path).ok().map(|m| m.is_dir()) - }).unwrap_or(false); - if is_dir { + // If this file is part of any other sub-package we've found so far, + // skip it. 
+ if subpackages_found.iter().any(|p| file_path.starts_with(p)) { + continue + } + + if is_dir.unwrap_or_else(|| file_path.is_dir()) { warn!(" found submodule {}", file_path.display()); let rel = util::without_prefix(&file_path, &root).unwrap(); let rel = try!(rel.to_str().chain_error(|| { @@ -237,10 +268,7 @@ impl<'cfg> PathSource<'cfg> { fn list_files_walk(&self, pkg: &Package, filter: &mut FnMut(&Path) -> bool) -> CargoResult> { let mut ret = Vec::new(); - for pkg in self.packages.iter().filter(|p| *p == pkg) { - let loc = pkg.root(); - try!(PathSource::walk(loc, &mut ret, true, filter)); - } + try!(PathSource::walk(pkg.root(), &mut ret, true, filter)); Ok(ret) } From 09847df8d30b1572e85d58be477e7c24117c8431 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Tue, 1 Mar 2016 08:24:43 -0800 Subject: [PATCH 6/7] Fix all tests with recent changes The package id for path dependencies now has another path component pointing precisely to the package being compiled, so lots of tests need their output matches to get updated. 
--- src/cargo/util/paths.rs | 8 ++-- tests/test_cargo_compile.rs | 6 +-- tests/test_cargo_compile_custom_build.rs | 2 +- tests/test_cargo_compile_git_deps.rs | 8 ++-- tests/test_cargo_compile_path_deps.rs | 56 ++++++++++++------------ tests/test_cargo_compile_plugins.rs | 4 +- tests/test_cargo_cross_compile.rs | 4 +- tests/test_cargo_doc.rs | 6 +-- tests/test_cargo_features.rs | 20 ++++----- tests/test_cargo_install.rs | 20 ++++++--- tests/test_cargo_package.rs | 15 ++++++- tests/test_cargo_profiles.rs | 2 +- tests/test_cargo_registry.rs | 4 +- tests/test_cargo_run.rs | 4 +- tests/test_cargo_rustc.rs | 7 ++- tests/test_cargo_rustdoc.rs | 14 +++--- tests/test_cargo_test.rs | 12 ++--- 17 files changed, 105 insertions(+), 87 deletions(-) diff --git a/src/cargo/util/paths.rs b/src/cargo/util/paths.rs index 2e9397df6c4..5770b80ea6f 100644 --- a/src/cargo/util/paths.rs +++ b/src/cargo/util/paths.rs @@ -73,8 +73,8 @@ pub fn read(path: &Path) -> CargoResult { let mut f = try!(File::open(path)); try!(f.read_to_string(&mut ret)); Ok(ret) - }).chain_error(|| { - internal(format!("failed to read `{}`", path.display())) + })().map_err(human).chain_error(|| { + human(format!("failed to read `{}`", path.display())) }) } @@ -83,8 +83,8 @@ pub fn write(path: &Path, contents: &[u8]) -> CargoResult<()> { let mut f = try!(File::create(path)); try!(f.write_all(contents)); Ok(()) - }).chain_error(|| { - internal(format!("failed to write `{}`", path.display())) + })().map_err(human).chain_error(|| { + human(format!("failed to write `{}`", path.display())) }) } diff --git a/tests/test_cargo_compile.rs b/tests/test_cargo_compile.rs index 2fddc664cf3..897043e72cb 100644 --- a/tests/test_cargo_compile.rs +++ b/tests/test_cargo_compile.rs @@ -315,7 +315,7 @@ test!(cargo_compile_with_warnings_in_a_dep_package { assert_that(p.cargo_process("build"), execs() - .with_stdout(&format!("{} bar v0.5.0 ({})\n\ + .with_stdout(&format!("{} bar v0.5.0 ({}/bar)\n\ {} foo v0.5.0 ({})\n", COMPILING, 
p.url(), COMPILING, p.url())) @@ -607,7 +607,7 @@ test!(cargo_compile_with_dep_name_mismatch { assert_that(p.cargo_process("build"), execs().with_status(101).with_stderr(&format!( r#"no matching package named `notquitebar` found (required by `foo`) -location searched: {proj_dir} +location searched: {proj_dir}/bar version required: * "#, proj_dir = p.url()))); }); @@ -1004,7 +1004,7 @@ test!(verbose_release_build_deps { .file("foo/src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v").arg("--release"), execs().with_status(0).with_stdout(&format!("\ -{compiling} foo v0.0.0 ({url}) +{compiling} foo v0.0.0 ({url}/foo) {running} `rustc foo[..]src[..]lib.rs --crate-name foo \ --crate-type dylib --crate-type rlib -C prefer-dynamic \ -C opt-level=3 \ diff --git a/tests/test_cargo_compile_custom_build.rs b/tests/test_cargo_compile_custom_build.rs index 4924787dc86..7c2ed61035e 100644 --- a/tests/test_cargo_compile_custom_build.rs +++ b/tests/test_cargo_compile_custom_build.rs @@ -970,7 +970,7 @@ test!(shared_dep_with_a_build_script { authors = [] [dependencies.a] - path = "../b" + path = "../a" "#) .file("b/src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v"), diff --git a/tests/test_cargo_compile_git_deps.rs b/tests/test_cargo_compile_git_deps.rs index e8614423316..28f9ac8be95 100644 --- a/tests/test_cargo_compile_git_deps.rs +++ b/tests/test_cargo_compile_git_deps.rs @@ -622,8 +622,8 @@ test!(update_with_shared_deps { execs().with_stdout(&format!("\ {updating} git repository `{git}` {compiling} bar v0.5.0 ({git}#[..]) -{compiling} [..] v0.5.0 ({dir}) -{compiling} [..] v0.5.0 ({dir}) +{compiling} [..] v0.5.0 ([..]) +{compiling} [..] v0.5.0 ([..]) {compiling} foo v0.5.0 ({dir})\n", updating = UPDATING, git = git_project.url(), compiling = COMPILING, dir = p.url()))); @@ -681,8 +681,8 @@ To learn more, run the command again with --verbose. assert_that(p.cargo("build"), execs().with_stdout(&format!("\ {compiling} bar v0.5.0 ({git}#[..]) -{compiling} [..] 
v0.5.0 ({dir}) -{compiling} [..] v0.5.0 ({dir}) +{compiling} [..] v0.5.0 ({dir}[..]dep[..]) +{compiling} [..] v0.5.0 ({dir}[..]dep[..]) {compiling} foo v0.5.0 ({dir})\n", git = git_project.url(), compiling = COMPILING, dir = p.url()))); diff --git a/tests/test_cargo_compile_path_deps.rs b/tests/test_cargo_compile_path_deps.rs index 4757f3cb9d5..9eaaac0a81d 100644 --- a/tests/test_cargo_compile_path_deps.rs +++ b/tests/test_cargo_compile_path_deps.rs @@ -72,8 +72,8 @@ test!(cargo_compile_with_nested_deps_shorthand { assert_that(p.cargo_process("build"), execs().with_status(0) - .with_stdout(&format!("{} baz v0.5.0 ({})\n\ - {} bar v0.5.0 ({})\n\ + .with_stdout(&format!("{} baz v0.5.0 ({}/bar/baz)\n\ + {} bar v0.5.0 ({}/bar)\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url(), @@ -90,13 +90,13 @@ test!(cargo_compile_with_nested_deps_shorthand { println!("building baz"); assert_that(p.cargo("build").arg("-p").arg("baz"), execs().with_status(0) - .with_stdout(&format!("{} baz v0.5.0 ({})\n", + .with_stdout(&format!("{} baz v0.5.0 ({}/bar/baz)\n", COMPILING, p.url()))); println!("building foo"); assert_that(p.cargo("build") .arg("-p").arg("foo"), execs().with_status(0) - .with_stdout(&format!("{} bar v0.5.0 ({})\n\ + .with_stdout(&format!("{} bar v0.5.0 ({}/bar)\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url()))); @@ -176,15 +176,15 @@ test!(cargo_compile_with_root_dev_deps_with_testing { p2.build(); assert_that(p.cargo_process("test"), execs().with_stdout(&format!("\ -{compiling} [..] v0.5.0 ({url}) -{compiling} [..] v0.5.0 ({url}) +{compiling} [..] v0.5.0 ([..]) +{compiling} [..] v0.5.0 ([..]) {running} target[..]foo-[..] running 0 tests test result: ok. 
0 passed; 0 failed; 0 ignored; 0 measured -", compiling = COMPILING, url = p.url(), running = RUNNING))); +", compiling = COMPILING, running = RUNNING))); }); test!(cargo_compile_with_transitive_dev_deps { @@ -229,7 +229,7 @@ test!(cargo_compile_with_transitive_dev_deps { "#); assert_that(p.cargo_process("build"), - execs().with_stdout(&format!("{} bar v0.5.0 ({})\n\ + execs().with_stdout(&format!("{} bar v0.5.0 ({}/bar)\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url()))); @@ -271,7 +271,7 @@ test!(no_rebuild_dependency { "#); // First time around we should compile both foo and bar assert_that(p.cargo_process("build"), - execs().with_stdout(&format!("{} bar v0.5.0 ({})\n\ + execs().with_stdout(&format!("{} bar v0.5.0 ({}/bar)\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url()))); @@ -282,7 +282,7 @@ test!(no_rebuild_dependency { p.build(); // rebuild the files (rewriting them in the process) assert_that(p.cargo("build"), - execs().with_stdout(&format!("{} bar v0.5.0 ({})\n\ + execs().with_stdout(&format!("{} bar v0.5.0 ({}/bar)\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url()))); @@ -337,8 +337,8 @@ test!(deep_dependencies_trigger_rebuild { pub fn baz() {} "#); assert_that(p.cargo_process("build"), - execs().with_stdout(&format!("{} baz v0.5.0 ({})\n\ - {} bar v0.5.0 ({})\n\ + execs().with_stdout(&format!("{} baz v0.5.0 ({}/baz)\n\ + {} bar v0.5.0 ({}/bar)\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url(), @@ -355,8 +355,8 @@ test!(deep_dependencies_trigger_rebuild { pub fn baz() { println!("hello!"); } "#).unwrap(); assert_that(p.cargo("build"), - execs().with_stdout(&format!("{} baz v0.5.0 ({})\n\ - {} bar v0.5.0 ({})\n\ + execs().with_stdout(&format!("{} baz v0.5.0 ({}/baz)\n\ + {} bar v0.5.0 ({}/bar)\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url(), @@ -369,7 +369,7 @@ test!(deep_dependencies_trigger_rebuild { pub fn bar() { println!("hello!"); baz::baz(); } "#).unwrap(); 
assert_that(p.cargo("build"), - execs().with_stdout(&format!("{} bar v0.5.0 ({})\n\ + execs().with_stdout(&format!("{} bar v0.5.0 ({}/bar)\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url()))); @@ -426,8 +426,8 @@ test!(no_rebuild_two_deps { pub fn baz() {} "#); assert_that(p.cargo_process("build"), - execs().with_stdout(&format!("{} baz v0.5.0 ({})\n\ - {} bar v0.5.0 ({})\n\ + execs().with_stdout(&format!("{} baz v0.5.0 ({}/baz)\n\ + {} bar v0.5.0 ({}/bar)\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url(), @@ -473,7 +473,7 @@ test!(nested_deps_recompile { let bar = p.url(); assert_that(p.cargo_process("build"), - execs().with_stdout(&format!("{} bar v0.5.0 ({})\n\ + execs().with_stdout(&format!("{} bar v0.5.0 ({}/src/bar)\n\ {} foo v0.5.0 ({})\n", COMPILING, bar, COMPILING, p.url()))); @@ -509,14 +509,16 @@ test!(error_message_for_missing_manifest { .file("src/bar/not-a-manifest", ""); assert_that(p.cargo_process("build"), - execs() - .with_status(101) - .with_stderr(&format!("\ + execs().with_status(101) + .with_stderr("\ Unable to update file://[..] 
Caused by: - Could not find `Cargo.toml` in `{}` -", p.root().join("src").join("bar").display()))); + failed to read `[..]bar[..]Cargo.toml` + +Caused by: + No such file or directory ([..]) +")); }); @@ -678,7 +680,7 @@ test!(path_dep_build_cmd { p.root().join("bar").move_into_the_past().unwrap(); assert_that(p.cargo("build"), - execs().with_stdout(&format!("{} bar v0.5.0 ({})\n\ + execs().with_stdout(&format!("{} bar v0.5.0 ({}/bar)\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url()))); @@ -695,7 +697,7 @@ test!(path_dep_build_cmd { } assert_that(p.cargo("build"), - execs().with_stdout(&format!("{} bar v0.5.0 ({})\n\ + execs().with_stdout(&format!("{} bar v0.5.0 ({}/bar)\n\ {} foo v0.5.0 ({})\n", COMPILING, p.url(), COMPILING, p.url()))); @@ -741,8 +743,8 @@ test!(dev_deps_no_rebuild_lib { assert_that(p.cargo("test"), execs().with_status(0) .with_stdout(&format!("\ -{compiling} [..] v0.5.0 ({url}) -{compiling} [..] v0.5.0 ({url}) +{compiling} [..] v0.5.0 ({url}[..]) +{compiling} [..] v0.5.0 ({url}[..]) {running} target[..]foo-[..] running 0 tests diff --git a/tests/test_cargo_compile_plugins.rs b/tests/test_cargo_compile_plugins.rs index 6bbbe8c03f6..537d065c3c8 100644 --- a/tests/test_cargo_compile_plugins.rs +++ b/tests/test_cargo_compile_plugins.rs @@ -264,7 +264,7 @@ test!(native_plugin_dependency_with_custom_ar_linker { foo.build(); assert_that(bar.cargo_process("build").arg("--verbose"), execs().with_stdout(&format!("\ -{compiling} foo v0.0.1 ({url}) +{compiling} foo v0.0.1 ([..]) {running} `rustc [..] 
-C ar=nonexistent-ar -C linker=nonexistent-linker [..]` -", compiling = COMPILING, running = RUNNING, url = bar.url()))) +", compiling = COMPILING, running = RUNNING))); }); diff --git a/tests/test_cargo_cross_compile.rs b/tests/test_cargo_cross_compile.rs index 6536c526e0d..8c16a485ab7 100644 --- a/tests/test_cargo_cross_compile.rs +++ b/tests/test_cargo_cross_compile.rs @@ -602,7 +602,7 @@ test!(build_script_needed_for_host_and_target { assert_that(p.cargo_process("build").arg("--target").arg(&target).arg("-v"), execs().with_status(0) .with_stdout_contains(&format!("\ -{compiling} d1 v0.0.0 ({url})", compiling = COMPILING, url = p.url())) +{compiling} d1 v0.0.0 ({url}/d1)", compiling = COMPILING, url = p.url())) .with_stdout_contains(&format!("\ {running} `rustc d1[..]build.rs [..] --out-dir {dir}[..]target[..]build[..]d1-[..]`", running = RUNNING, dir = p.root().display())) @@ -612,7 +612,7 @@ test!(build_script_needed_for_host_and_target { .with_stdout_contains(&format!("\ {running} `rustc d1[..]src[..]lib.rs [..]`", running = RUNNING)) .with_stdout_contains(&format!("\ -{compiling} d2 v0.0.0 ({url})", compiling = COMPILING, url = p.url())) +{compiling} d2 v0.0.0 ({url}/d2)", compiling = COMPILING, url = p.url())) .with_stdout_contains(&format!("\ {running} `rustc d2[..]src[..]lib.rs [..] \ -L /path/to/{host}`", running = RUNNING, host = host)) diff --git a/tests/test_cargo_doc.rs b/tests/test_cargo_doc.rs index 4ce4996c0e9..a53a621202f 100644 --- a/tests/test_cargo_doc.rs +++ b/tests/test_cargo_doc.rs @@ -101,8 +101,8 @@ test!(doc_deps { assert_that(p.cargo_process("doc"), execs().with_status(0).with_stdout(&format!("\ -[..] bar v0.0.1 ({dir}) -[..] bar v0.0.1 ({dir}) +[..] bar v0.0.1 ({dir}/bar) +[..] 
bar v0.0.1 ({dir}/bar) {documenting} foo v0.0.1 ({dir}) ", documenting = DOCUMENTING, @@ -148,7 +148,7 @@ test!(doc_no_deps { assert_that(p.cargo_process("doc").arg("--no-deps"), execs().with_status(0).with_stdout(&format!("\ -{compiling} bar v0.0.1 ({dir}) +{compiling} bar v0.0.1 ({dir}/bar) {documenting} foo v0.0.1 ({dir}) ", documenting = DOCUMENTING, compiling = COMPILING, diff --git a/tests/test_cargo_features.rs b/tests/test_cargo_features.rs index 11211087b0b..b7f62c2c84f 100644 --- a/tests/test_cargo_features.rs +++ b/tests/test_cargo_features.rs @@ -253,7 +253,7 @@ test!(no_feature_doesnt_build { assert_that(p.cargo("build").arg("--features").arg("bar"), execs().with_status(0).with_stdout(format!("\ -{compiling} bar v0.0.1 ({dir}) +{compiling} bar v0.0.1 ({dir}/bar) {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = p.url()))); assert_that(p.process(&p.bin("foo")), @@ -293,7 +293,7 @@ test!(default_feature_pulled_in { assert_that(p.cargo_process("build"), execs().with_status(0).with_stdout(format!("\ -{compiling} bar v0.0.1 ({dir}) +{compiling} bar v0.0.1 ({dir}/bar) {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = p.url()))); assert_that(p.process(&p.bin("foo")), @@ -394,8 +394,8 @@ test!(groups_on_groups_on_groups { assert_that(p.cargo_process("build"), execs().with_status(0).with_stdout(format!("\ -{compiling} ba[..] v0.0.1 ({dir}) -{compiling} ba[..] v0.0.1 ({dir}) +{compiling} ba[..] v0.0.1 ({dir}/ba[..]) +{compiling} ba[..] v0.0.1 ({dir}/ba[..]) {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = p.url()))); }); @@ -438,8 +438,8 @@ test!(many_cli_features { assert_that(p.cargo_process("build").arg("--features").arg("bar baz"), execs().with_status(0).with_stdout(format!("\ -{compiling} ba[..] v0.0.1 ({dir}) -{compiling} ba[..] v0.0.1 ({dir}) +{compiling} ba[..] v0.0.1 ({dir}/ba[..]) +{compiling} ba[..] 
v0.0.1 ({dir}/ba[..]) {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = p.url()))); }); @@ -499,8 +499,8 @@ test!(union_features { assert_that(p.cargo_process("build"), execs().with_status(0).with_stdout(format!("\ -{compiling} d2 v0.0.1 ({dir}) -{compiling} d1 v0.0.1 ({dir}) +{compiling} d2 v0.0.1 ({dir}/d2) +{compiling} d1 v0.0.1 ({dir}/d1) {compiling} foo v0.0.1 ({dir}) ", compiling = COMPILING, dir = p.url()))); }); @@ -533,14 +533,14 @@ test!(many_features_no_rebuilds { assert_that(p.cargo_process("build"), execs().with_status(0).with_stdout(format!("\ -{compiling} a v0.1.0 ({dir}) +{compiling} a v0.1.0 ({dir}/a) {compiling} b v0.1.0 ({dir}) ", compiling = COMPILING, dir = p.url()))); p.root().move_into_the_past().unwrap(); assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stdout(format!("\ -{fresh} a v0.1.0 ([..]) +{fresh} a v0.1.0 ([..]/a) {fresh} b v0.1.0 ([..]) ", fresh = FRESH))); }); diff --git a/tests/test_cargo_install.rs b/tests/test_cargo_install.rs index e56e15e9f0f..247b654dde4 100644 --- a/tests/test_cargo_install.rs +++ b/tests/test_cargo_install.rs @@ -124,7 +124,10 @@ test!(no_crate { `[..]` is not a crate root; specify a crate to install [..] 
Caused by: - Could not find Cargo.toml in `[..]` + failed to read `[..]Cargo.toml` + +Caused by: + No such file or directory ([..]) ")); }); @@ -197,7 +200,7 @@ binary `foo[..]` already exists in destination as part of `foo v0.1.0 [..]` }); test!(multiple_crates_error { - let p = project("foo") + let p = git::repo(&paths::root().join("foo")) .file("Cargo.toml", r#" [package] name = "foo" @@ -214,14 +217,14 @@ test!(multiple_crates_error { .file("a/src/main.rs", "fn main() {}"); p.build(); - assert_that(cargo_process("install").arg("--path").arg(p.root()), + assert_that(cargo_process("install").arg("--git").arg(p.url().to_string()), execs().with_status(101).with_stderr("\ multiple packages with binaries found: bar, foo ")); }); test!(multiple_crates_select { - let p = project("foo") + let p = git::repo(&paths::root().join("foo")) .file("Cargo.toml", r#" [package] name = "foo" @@ -238,12 +241,14 @@ test!(multiple_crates_select { .file("a/src/main.rs", "fn main() {}"); p.build(); - assert_that(cargo_process("install").arg("--path").arg(p.root()).arg("foo"), + assert_that(cargo_process("install").arg("--git").arg(p.url().to_string()) + .arg("foo"), execs().with_status(0)); assert_that(cargo_home(), has_installed_exe("foo")); assert_that(cargo_home(), is_not(has_installed_exe("bar"))); - assert_that(cargo_process("install").arg("--path").arg(p.root()).arg("bar"), + assert_that(cargo_process("install").arg("--git").arg(p.url().to_string()) + .arg("bar"), execs().with_status(0)); assert_that(cargo_home(), has_installed_exe("bar")); }); @@ -541,7 +546,8 @@ test!(installs_from_cwd_by_default { .file("src/main.rs", "fn main() {}"); p.build(); - assert_that(cargo_process("install"), execs().with_status(0)); + assert_that(cargo_process("install").cwd(p.root()), + execs().with_status(0)); assert_that(cargo_home(), has_installed_exe("foo")); }); diff --git a/tests/test_cargo_package.rs b/tests/test_cargo_package.rs index 2cea29350d3..072519da711 100644 --- 
a/tests/test_cargo_package.rs +++ b/tests/test_cargo_package.rs @@ -162,11 +162,24 @@ test!(package_verbose { let mut cargo = ::cargo_process(); cargo.cwd(p.root()); assert_that(cargo.clone().arg("build"), execs().with_status(0)); - assert_that(cargo.arg("package").arg("-v").arg("--no-verify"), + + println!("package main repo"); + assert_that(cargo.clone().arg("package").arg("-v").arg("--no-verify"), execs().with_status(0).with_stdout(&format!("\ {packaging} foo v0.0.1 ([..]) {archiving} [..] {archiving} [..] +", + packaging = PACKAGING, + archiving = ARCHIVING))); + + println!("package sub-repo"); + assert_that(cargo.arg("package").arg("-v").arg("--no-verify") + .cwd(p.root().join("a")), + execs().with_status(0).with_stdout(&format!("\ +{packaging} a v0.0.1 ([..]) +{archiving} [..] +{archiving} [..] ", packaging = PACKAGING, archiving = ARCHIVING))); diff --git a/tests/test_cargo_profiles.rs b/tests/test_cargo_profiles.rs index b41d4e851d7..e8a6193cd57 100644 --- a/tests/test_cargo_profiles.rs +++ b/tests/test_cargo_profiles.rs @@ -78,7 +78,7 @@ test!(top_level_overrides_deps { .file("foo/src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v").arg("--release"), execs().with_status(0).with_stdout(&format!("\ -{compiling} foo v0.0.0 ({url}) +{compiling} foo v0.0.0 ({url}/foo) {running} `rustc foo{sep}src{sep}lib.rs --crate-name foo \ --crate-type dylib --crate-type rlib -C prefer-dynamic \ -C opt-level=1 \ diff --git a/tests/test_cargo_registry.rs b/tests/test_cargo_registry.rs index bbc531978ad..0274bd87003 100644 --- a/tests/test_cargo_registry.rs +++ b/tests/test_cargo_registry.rs @@ -618,7 +618,7 @@ test!(updating_a_dep { {updating} registry `[..]` {downloading} bar v0.0.1 (registry file://[..]) {compiling} bar v0.0.1 (registry file://[..]) -{compiling} a v0.0.1 ({dir}) +{compiling} a v0.0.1 ({dir}/a) {compiling} foo v0.0.1 ({dir}) ", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, dir = p.url()))); @@ -640,7 +640,7 @@ 
test!(updating_a_dep { {updating} registry `[..]` {downloading} bar v0.1.0 (registry file://[..]) {compiling} bar v0.1.0 (registry file://[..]) -{compiling} a v0.0.1 ({dir}) +{compiling} a v0.0.1 ({dir}/a) {compiling} foo v0.0.1 ({dir}) ", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, dir = p.url()))); diff --git a/tests/test_cargo_run.rs b/tests/test_cargo_run.rs index 1cb3550aaf6..cd37c2ea004 100644 --- a/tests/test_cargo_run.rs +++ b/tests/test_cargo_run.rs @@ -332,7 +332,7 @@ test!(example_with_release_flag { assert_that(p.cargo_process("run").arg("-v").arg("--release").arg("--example").arg("a"), execs().with_status(0).with_stdout(&format!("\ -{compiling} bar v0.0.1 ({url}) +{compiling} bar v0.0.1 ({url}/bar) {running} `rustc bar{sep}src{sep}bar.rs --crate-name bar --crate-type lib \ -C opt-level=3 \ -C metadata=[..] \ @@ -361,7 +361,7 @@ fast2 assert_that(p.cargo("run").arg("-v").arg("--example").arg("a"), execs().with_status(0).with_stdout(&format!("\ -{compiling} bar v0.0.1 ({url}) +{compiling} bar v0.0.1 ({url}/bar) {running} `rustc bar{sep}src{sep}bar.rs --crate-name bar --crate-type lib \ -g \ -C metadata=[..] \ diff --git a/tests/test_cargo_rustc.rs b/tests/test_cargo_rustc.rs index cda7726060b..2816d92a194 100644 --- a/tests/test_cargo_rustc.rs +++ b/tests/test_cargo_rustc.rs @@ -249,7 +249,7 @@ test!(build_foo_with_bar_dependency { execs() .with_status(0) .with_stdout(format!("\ -{compiling} bar v0.1.0 ({url}) +{compiling} bar v0.1.0 ([..]) {running} `[..] -g -C [..]` {compiling} foo v0.0.1 ({url}) {running} `[..] -g -Z unstable-options [..]` @@ -292,11 +292,10 @@ test!(build_only_bar_dependency { execs() .with_status(0) .with_stdout(format!("\ -{compiling} bar v0.1.0 ({url}) +{compiling} bar v0.1.0 ([..]) {running} `[..]--crate-name bar --crate-type lib [..] 
-Z unstable-options [..]` ", - compiling = COMPILING, running = RUNNING, - url = foo.url()))); + compiling = COMPILING, running = RUNNING))); }); test!(fail_with_multiple_packages { diff --git a/tests/test_cargo_rustdoc.rs b/tests/test_cargo_rustdoc.rs index 0951d831fbb..8d47bf8973f 100644 --- a/tests/test_cargo_rustdoc.rs +++ b/tests/test_cargo_rustdoc.rs @@ -89,8 +89,8 @@ test!(rustdoc_foo_with_bar_dependency { execs() .with_status(0) .with_stdout(format!("\ -{compiling} bar v0.0.1 ({url}) -{running} `rustc {bar_dir}{sep}src{sep}lib.rs [..]` +{compiling} bar v0.0.1 ([..]) +{running} `rustc [..]bar{sep}src{sep}lib.rs [..]` {documenting} foo v0.0.1 ({url}) {running} `rustdoc src{sep}lib.rs --crate-name foo \ -o {dir}{sep}target{sep}doc \ @@ -101,8 +101,7 @@ test!(rustdoc_foo_with_bar_dependency { ", running = RUNNING, compiling = COMPILING, sep = SEP, documenting = DOCUMENTING, - dir = foo.root().display(), url = foo.url(), - bar_dir = bar.root().display()))); + dir = foo.root().display(), url = foo.url()))); }); test!(rustdoc_only_bar_dependency { @@ -139,16 +138,15 @@ test!(rustdoc_only_bar_dependency { execs() .with_status(0) .with_stdout(format!("\ -{documenting} bar v0.0.1 ({url}) -{running} `rustdoc {bar_dir}{sep}src{sep}lib.rs --crate-name bar \ +{documenting} bar v0.0.1 ([..]) +{running} `rustdoc [..]bar{sep}src{sep}lib.rs --crate-name bar \ -o {dir}{sep}target{sep}doc \ --no-defaults \ -L dependency={dir}{sep}target{sep}debug{sep}deps \ -L dependency={dir}{sep}target{sep}debug{sep}deps` ", running = RUNNING, documenting = DOCUMENTING, sep = SEP, - dir = foo.root().display(), url = foo.url(), - bar_dir = bar.root().display()))); + dir = foo.root().display()))); }); diff --git a/tests/test_cargo_test.rs b/tests/test_cargo_test.rs index 7fc0042d296..ab5cf5f778f 100644 --- a/tests/test_cargo_test.rs +++ b/tests/test_cargo_test.rs @@ -82,7 +82,7 @@ test!(cargo_test_release { assert_that(p.cargo_process("test").arg("-v").arg("--release"), 
execs().with_stdout(format!("\ -{compiling} bar v0.0.1 ({dir}) +{compiling} bar v0.0.1 ({dir}/bar) {running} [..] -C opt-level=3 [..] {compiling} foo v0.1.0 ({dir}) {running} [..] -C opt-level=3 [..] @@ -314,7 +314,7 @@ test!(test_with_deep_lib_dep { assert_that(p.cargo_process("test"), execs().with_status(0) .with_stdout(&format!("\ -{compiling} foo v0.0.1 ({dir}) +{compiling} foo v0.0.1 ([..]) {compiling} bar v0.0.1 ({dir}) {running} target[..] @@ -951,7 +951,7 @@ test!(test_dylib { assert_that(p.cargo_process("test"), execs().with_status(0) .with_stdout(&format!("\ -{compiling} bar v0.0.1 ({dir}) +{compiling} bar v0.0.1 ({dir}/bar) {compiling} foo v0.0.1 ({dir}) {running} target[..]foo-[..] @@ -1259,7 +1259,7 @@ test!(selective_testing { assert_that(p.cargo("test").arg("-p").arg("d1"), execs().with_status(0) .with_stdout(&format!("\ -{compiling} d1 v0.0.1 ({dir}) +{compiling} d1 v0.0.1 ({dir}/d1) {running} target[..]d1-[..] running 0 tests @@ -1279,7 +1279,7 @@ test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured assert_that(p.cargo("test").arg("-p").arg("d2"), execs().with_status(0) .with_stdout(&format!("\ -{compiling} d2 v0.0.1 ({dir}) +{compiling} d2 v0.0.1 ({dir}/d2) {running} target[..]d2-[..] running 0 tests @@ -1457,7 +1457,7 @@ test!(selective_testing_with_docs { assert_that(p.cargo("test").arg("-p").arg("d1"), execs().with_status(0) .with_stdout(&format!("\ -{compiling} d1 v0.0.1 ({dir}) +{compiling} d1 v0.0.1 ({dir}/d1) {running} target[..]deps[..]d1[..] running 0 tests From d3d206daadfebadf4edb7a5a9a2fdab8ec960344 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Tue, 1 Mar 2016 08:25:22 -0800 Subject: [PATCH 7/7] Add a regression test for the issue being fixed When compiling a package from two separate locations it should be cached the same way both times. 
--- src/cargo/ops/cargo_read_manifest.rs | 1 - tests/test_cargo_bench.rs | 6 +-- tests/test_cargo_compile_path_deps.rs | 2 +- tests/test_cargo_freshness.rs | 64 +++++++++++++++++++++++++++ tests/test_cargo_install.rs | 2 +- 5 files changed, 69 insertions(+), 6 deletions(-) diff --git a/src/cargo/ops/cargo_read_manifest.rs b/src/cargo/ops/cargo_read_manifest.rs index 20a672e2867..ddaace3421f 100644 --- a/src/cargo/ops/cargo_read_manifest.rs +++ b/src/cargo/ops/cargo_read_manifest.rs @@ -1,6 +1,5 @@ use std::collections::{HashMap, HashSet}; use std::fs; -use std::io::prelude::*; use std::io; use std::path::{Path, PathBuf}; diff --git a/tests/test_cargo_bench.rs b/tests/test_cargo_bench.rs index 0e16211c52e..c98035d5c80 100644 --- a/tests/test_cargo_bench.rs +++ b/tests/test_cargo_bench.rs @@ -299,7 +299,7 @@ test!(bench_with_deep_lib_dep { assert_that(p.cargo_process("bench"), execs().with_status(0) .with_stdout(&format!("\ -{compiling} foo v0.0.1 ({dir}) +{compiling} foo v0.0.1 ([..]) {compiling} bar v0.0.1 ({dir}) {running} target[..] @@ -705,7 +705,7 @@ test!(bench_dylib { assert_that(p.cargo_process("bench").arg("-v"), execs().with_status(0) .with_stdout(&format!("\ -{compiling} bar v0.0.1 ({dir}) +{compiling} bar v0.0.1 ({dir}/bar) {running} [..] -C opt-level=3 [..] {compiling} foo v0.0.1 ({dir}) {running} [..] -C opt-level=3 [..] @@ -732,7 +732,7 @@ test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured assert_that(p.cargo("bench").arg("-v"), execs().with_status(0) .with_stdout(&format!("\ -{fresh} bar v0.0.1 ({dir}) +{fresh} bar v0.0.1 ({dir}/bar) {fresh} foo v0.0.1 ({dir}) {running} [..]target[..]release[..]bench-[..] diff --git a/tests/test_cargo_compile_path_deps.rs b/tests/test_cargo_compile_path_deps.rs index 9eaaac0a81d..5cd97d301fd 100644 --- a/tests/test_cargo_compile_path_deps.rs +++ b/tests/test_cargo_compile_path_deps.rs @@ -517,7 +517,7 @@ Caused by: failed to read `[..]bar[..]Cargo.toml` Caused by: - No such file or directory ([..]) + [..] 
(os error [..]) ")); }); diff --git a/tests/test_cargo_freshness.rs b/tests/test_cargo_freshness.rs index 152190cf045..dc1a402df6d 100644 --- a/tests/test_cargo_freshness.rs +++ b/tests/test_cargo_freshness.rs @@ -288,3 +288,67 @@ test!(rerun_if_changed_in_dep { assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); }); + +test!(same_build_dir_cached_packages { + let p = project("foo") + .file("a1/Cargo.toml", r#" + [package] + name = "a1" + version = "0.0.1" + authors = [] + [dependencies] + b = { path = "../b" } + "#) + .file("a1/src/lib.rs", "") + .file("a2/Cargo.toml", r#" + [package] + name = "a2" + version = "0.0.1" + authors = [] + [dependencies] + b = { path = "../b" } + "#) + .file("a2/src/lib.rs", "") + .file("b/Cargo.toml", r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + [dependencies] + c = { path = "../c" } + "#) + .file("b/src/lib.rs", "") + .file("c/Cargo.toml", r#" + [package] + name = "c" + version = "0.0.1" + authors = [] + [dependencies] + d = { path = "../d" } + "#) + .file("c/src/lib.rs", "") + .file("d/Cargo.toml", r#" + [package] + name = "d" + version = "0.0.1" + authors = [] + "#) + .file("d/src/lib.rs", "") + .file(".cargo/config", r#" + [build] + target-dir = "./target" + "#); + p.build(); + + assert_that(p.cargo("build").cwd(p.root().join("a1")), + execs().with_status(0).with_stdout(&format!("\ +{compiling} d v0.0.1 ({dir}/d) +{compiling} c v0.0.1 ({dir}/c) +{compiling} b v0.0.1 ({dir}/b) +{compiling} a1 v0.0.1 ({dir}/a1) +", compiling = COMPILING, dir = p.url()))); + assert_that(p.cargo("build").cwd(p.root().join("a2")), + execs().with_status(0).with_stdout(&format!("\ +{compiling} a2 v0.0.1 ({dir}/a2) +", compiling = COMPILING, dir = p.url()))); +}); diff --git a/tests/test_cargo_install.rs b/tests/test_cargo_install.rs index 247b654dde4..f385086ca11 100644 --- a/tests/test_cargo_install.rs +++ b/tests/test_cargo_install.rs @@ -127,7 +127,7 @@ Caused by: failed to read `[..]Cargo.toml` Caused by: - No 
such file or directory ([..]) + [..] (os error [..]) ")); });