diff --git a/Cargo.lock b/Cargo.lock index 33fdc79f9a2..ff61b2b3247 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -118,13 +118,13 @@ dependencies = [ [[package]] name = "cargo" version = "0.30.0" -source = "git+https://github.com/rust-lang/cargo?rev=af9e40c26b4ea2ebd6f31ee86ee61d5ac1c74eb0#af9e40c26b4ea2ebd6f31ee86ee61d5ac1c74eb0" +source = "git+https://github.com/rust-lang/cargo?rev=1ec8c70d00afa77deafb8fbd4c1d07fb68b2b556#1ec8c70d00afa77deafb8fbd4c1d07fb68b2b556" dependencies = [ "atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", "clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)", "core-foundation 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", - "crates-io 0.18.0 (git+https://github.com/rust-lang/cargo?rev=af9e40c26b4ea2ebd6f31ee86ee61d5ac1c74eb0)", - "crossbeam 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "crates-io 0.18.0 (git+https://github.com/rust-lang/cargo?rev=1ec8c70d00afa77deafb8fbd4c1d07fb68b2b556)", + "crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", "crypto-hash 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "curl 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.5.11 (registry+https://github.com/rust-lang/crates.io-index)", @@ -140,13 +140,14 @@ dependencies = [ "ignore 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "jobserver 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "lazycell 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazycell 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)", "libgit2-sys 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "miow 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rustfix 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", "same-file 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.70 (registry+https://github.com/rust-lang/crates.io-index)", @@ -156,7 +157,7 @@ dependencies = [ "shell-escape 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "tar 0.4.16 (registry+https://github.com/rust-lang/crates.io-index)", "tempfile 3.0.3 (registry+https://github.com/rust-lang/crates.io-index)", - "termcolor 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", + "termcolor 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "toml 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-width 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", "url 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -311,7 +312,7 @@ dependencies = [ [[package]] name = "crates-io" version = "0.18.0" -source = "git+https://github.com/rust-lang/cargo?rev=af9e40c26b4ea2ebd6f31ee86ee61d5ac1c74eb0#af9e40c26b4ea2ebd6f31ee86ee61d5ac1c74eb0" +source = "git+https://github.com/rust-lang/cargo?rev=1ec8c70d00afa77deafb8fbd4c1d07fb68b2b556#1ec8c70d00afa77deafb8fbd4c1d07fb68b2b556" dependencies = [ "curl 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)", "failure 0.1.1 
(registry+https://github.com/rust-lang/crates.io-index)", @@ -387,6 +388,11 @@ name = "crossbeam-utils" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "crossbeam-utils" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "crypto-hash" version = "0.3.1" @@ -774,6 +780,11 @@ name = "lazycell" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "lazycell" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "libc" version = "0.2.42" @@ -1166,7 +1177,7 @@ dependencies = [ name = "rls" version = "0.129.0" dependencies = [ - "cargo 0.30.0 (git+https://github.com/rust-lang/cargo?rev=af9e40c26b4ea2ebd6f31ee86ee61d5ac1c74eb0)", + "cargo 0.30.0 (git+https://github.com/rust-lang/cargo?rev=1ec8c70d00afa77deafb8fbd4c1d07fb68b2b556)", "cargo_metadata 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)", "clippy_lints 0.0.212 (git+https://github.com/rust-lang-nursery/rust-clippy?rev=1f656173723dce4efc3fc90ff5763a2186ec9089)", "crossbeam-channel 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1480,6 +1491,18 @@ dependencies = [ "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "rustfix" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.70 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.70 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "rustfmt-nightly" version = "0.9.0" @@ -1893,7 +1916,7 @@ dependencies = [ "checksum bitflags 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d0c54bb8f454c567f21197eefcdbf5679d0bd99f2ddbe52e84c77061952e6789" "checksum byteorder 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "74c0b906e9446b0a2e4f760cdb3fa4b2c48cdc6db8766a845c54b6ff063fd2e9" "checksum cargo 0.28.0 (registry+https://github.com/rust-lang/crates.io-index)" = "21dd0ac7737313b8c5c6fbfaf351aa93d4e90f66d4a33a11d1f3fb29584ac631" -"checksum cargo 0.30.0 (git+https://github.com/rust-lang/cargo?rev=af9e40c26b4ea2ebd6f31ee86ee61d5ac1c74eb0)" = "" +"checksum cargo 0.30.0 (git+https://github.com/rust-lang/cargo?rev=1ec8c70d00afa77deafb8fbd4c1d07fb68b2b556)" = "" "checksum cargo_metadata 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)" = "1efca0b863ca03ed4c109fb1c55e0bc4bbeb221d3e103d86251046b06a526bd0" "checksum cargo_metadata 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2d6809b327f87369e6f3651efd2c5a96c49847a3ed2559477ecba79014751ee1" "checksum cc 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)" = "2119ea4867bd2b8ed3aecab467709720b2d55b1bcfe09f772fd68066eaf15275" @@ -1909,7 +1932,7 @@ dependencies = [ "checksum core-foundation-sys 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "716c271e8613ace48344f723b60b900a93150271e5be206212d052bbc0883efa" "checksum core-foundation-sys 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a3fb15cdbdd9cf8b82d97d0296bb5cd3631bba58d6e31650a002a8e7fb5721f9" "checksum crates-io 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"5f78703ef5348db1e3244fb6b496e840965fb4754a5319270f2bd77ddb856e1c" -"checksum crates-io 0.18.0 (git+https://github.com/rust-lang/cargo?rev=af9e40c26b4ea2ebd6f31ee86ee61d5ac1c74eb0)" = "" +"checksum crates-io 0.18.0 (git+https://github.com/rust-lang/cargo?rev=1ec8c70d00afa77deafb8fbd4c1d07fb68b2b556)" = "" "checksum crossbeam 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "24ce9782d4d5c53674646a6a4c1863a21a8fc0cb649b3c94dfc16e45071dea19" "checksum crossbeam-channel 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "efff2d411e0ac3731b9f6de882b2790fdd2de651577500a806ce78b95b2b9f31" "checksum crossbeam-deque 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f739f8c5363aca78cfb059edf753d8f0d36908c348f3d8d1503f03d8b75d9cf3" @@ -1917,6 +1940,7 @@ dependencies = [ "checksum crossbeam-epoch 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "285987a59c4d91388e749850e3cb7b3a92299668528caaacd08005b8f238c0ea" "checksum crossbeam-utils 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "2760899e32a1d58d5abb31129f8fae5de75220bc2176e77ff7c627ae45c918d9" "checksum crossbeam-utils 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ea52fab26a99d96cdff39d0ca75c9716125937f5dba2ab83923aaaf5928f684a" +"checksum crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "677d453a17e8bd2b913fa38e8b9cf04bcdbb5be790aa294f2389661d72036015" "checksum crypto-hash 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "09de9ee0fc255ace04c7fa0763c9395a945c37c8292bb554f8d48361d1dcf1b4" "checksum curl 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)" = "893713db705eab9847e050268507b0e2a2aad64e90a831874bd4e8e0d67f9523" "checksum curl-sys 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)" = "de9cf174efdf90b5887c4e2e900769373c89c5e18152e8f3ed75b501a6f1c0fb" @@ -1961,6 +1985,7 @@ dependencies = [ "checksum languageserver-types 0.45.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9d91d91d1c23db74187096d191967cb49f49bb175ad6d855fa9229d16ef2c982" "checksum lazy_static 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "fb497c35d362b6a331cfd94956a07fc2c78a4604cdbee844a81170386b996dd3" "checksum lazycell 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a6f08839bc70ef4a3fe1d566d5350f519c5912ea86be0df1740a7d247c7fc0ef" +"checksum lazycell 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d33a48d0365c96081958cc663eef834975cb1e8d8bea3378513fc72bdbf11e50" "checksum libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)" = "b685088df2b950fccadf07a7187c8ef846a959c142338a48f9dc0b94517eb5f1" "checksum libgit2-sys 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c9051a4b288ba6f8728e9e52bb2510816946b8bcb2e20259e4d4cdc93b9ecafd" "checksum libssh2-sys 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "c628b499e8d1a4f4bd09a95d6cb1f8aeb231b46a9d40959bbd0408f14dd63adf" @@ -2035,6 +2060,7 @@ dependencies = [ "checksum rustc-rayon-core 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "40f06724db71e18d68b3b946fdf890ca8c921d9edccc1404fdfdb537b0d12649" "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" "checksum rustc_version 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a54aa04a10c68c1c4eacb4337fd883b435997ede17a9385784b990777686b09a" +"checksum rustfix 
0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "86f77b09d42bae4adfbcd105a8914e2d9fb46b63612c1a765b824a2b4a4bb814" "checksum rustfmt-nightly 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d011cbbb7bfec821e3bc9dc2cb0ff3e1f413179e129a90ba1f1d256e31b4e846" "checksum same-file 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "cfb6eded0b06a0b512c8ddbcf04089138c9b4362c2f696f3c3d76039d68f3637" "checksum schannel 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "dc1fabf2a7b6483a141426e1afd09ad543520a77ac49bd03c286e7696ccfd77f" diff --git a/Cargo.toml b/Cargo.toml index 2907ee08f4e..b3330c7fc64 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -13,7 +13,7 @@ categories = ["development-tools"] build = "build.rs" [dependencies] -cargo = { git = "https://github.com/rust-lang/cargo", rev = "af9e40c26b4ea2ebd6f31ee86ee61d5ac1c74eb0" } +cargo = { git = "https://github.com/rust-lang/cargo", rev = "1ec8c70d00afa77deafb8fbd4c1d07fb68b2b556" } cargo_metadata = "0.5.2" clippy_lints = { git = "https://github.com/rust-lang-nursery/rust-clippy", rev = "1f656173723dce4efc3fc90ff5763a2186ec9089", optional = true } env_logger = "0.5" diff --git a/src/build/cargo.rs b/src/build/cargo.rs index a1f60c99e46..adab860579f 100644 --- a/src/build/cargo.rs +++ b/src/build/cargo.rs @@ -19,6 +19,7 @@ use serde_json; use crate::actions::progress::ProgressUpdate; use rls_data::Analysis; use crate::build::{BufWriter, BuildResult, CompilationContext, Internals, PackageArg}; +use crate::build::plan::Plan; use crate::build::environment::{self, Environment, EnvironmentLock}; use crate::config::Config; use rls_vfs::Vfs; @@ -119,14 +120,7 @@ fn run_cargo( let mut restore_env = Environment::push_with_lock(&HashMap::new(), None, lock_guard); - let build_dir = { - let mut compilation_cx = compilation_cx.lock().unwrap(); - // Since Cargo build routine will try to regenerate the unit dep graph, - // we need to clear the existing dep graph. - compilation_cx.build_plan.clear(); - - compilation_cx.build_dir.as_ref().unwrap().clone() - }; + let build_dir = compilation_cx.lock().unwrap().build_dir.clone().unwrap(); // Note that this may not be equal build_dir when inside a workspace member let manifest_path = important_paths::find_root_manifest_for_wd(&build_dir)?; @@ -147,9 +141,9 @@ fn run_cargo( enable_nightly_features(); let ws = Workspace::new(&manifest_path, &config)?; - let packages = match package_arg { - PackageArg::Unknown | PackageArg::All => vec![], - PackageArg::Package(s) => vec![s] + let (all, packages) = match package_arg { + PackageArg::Default => (false, vec![]), + PackageArg::Packages(pkgs) => (false, pkgs.into_iter().collect()) }; // TODO: It might be feasible to keep this CargoOptions structure cached and regenerate @@ -172,7 +166,16 @@ fn run_cargo( (opts, rustflags, rls_config.clear_env_rust_log, rls_config.cfg_test) }; - let spec = Packages::from_flags(false, Vec::new(), packages)?; + let spec = Packages::from_flags(all, Vec::new(), packages)?; + + let pkg_names = spec.into_package_id_specs(&ws)?.iter() + .map(|pkg_spec| pkg_spec.name().to_owned()) + .collect(); + trace!("Specified packages to be built by Cargo: {:#?}", pkg_names); + + // Since Cargo build routine will try to regenerate the unit dep graph, + // we need to clear the existing dep graph. 
+ compilation_cx.lock().unwrap().build_plan = Plan::for_packages(pkg_names); let compile_opts = CompileOptions { spec, @@ -330,7 +333,7 @@ impl Executor for RlsExecutor { self.is_primary_crate(id) } - fn exec(&self, mut cargo_cmd: ProcessBuilder, id: &PackageId, target: &Target) -> CargoResult<()> { + fn exec(&self, mut cargo_cmd: ProcessBuilder, id: &PackageId, target: &Target, mode: CompileMode) -> CargoResult<()> { // Use JSON output so that we can parse the rustc output. cargo_cmd.arg("--error-format=json"); // Delete any stale data. We try and remove any json files with @@ -461,7 +464,7 @@ impl Executor for RlsExecutor { // Cache executed command for the build plan { let mut cx = self.compilation_cx.lock().unwrap(); - cx.build_plan.cache_compiler_job(id, target, &cmd); + cx.build_plan.cache_compiler_job(id, target, mode, &cmd); } // Prepare modified cargo-generated args/envs for future rustc calls diff --git a/src/build/mod.rs b/src/build/mod.rs index c41f4b4ee1a..bfcd5529f4a 100644 --- a/src/build/mod.rs +++ b/src/build/mod.rs @@ -22,7 +22,7 @@ use log::{log, trace}; use self::environment::EnvironmentLock; -use std::collections::HashMap; +use std::collections::{HashMap, HashSet}; use std::ffi::OsString; use std::io::{self, Write}; use std::mem; @@ -159,12 +159,10 @@ impl CompilationContext { } #[derive(Debug, Clone, Eq, PartialEq)] +/// Specified set of packages to be built by Cargo. pub enum PackageArg { - Unknown, - // --all - All, - // -p ... - Package(String), + Default, + Packages(HashSet), } /// Status of the build queue. @@ -550,6 +548,8 @@ impl Internals { }; cx.build_plan.prepare_work(&manifest_path, &modified, needs_to_run_cargo) }; + trace!("Specified work: {:?}", work); + match work { // Cargo performs the full build and returns // appropriate diagnostics/analysis data diff --git a/src/build/plan.rs b/src/build/plan.rs index 87e3a70d534..c8f46230829 100644 --- a/src/build/plan.rs +++ b/src/build/plan.rs @@ -25,6 +25,7 @@ //! build scripts). use std::collections::{HashMap, HashSet}; +use std::ffi::OsStr; use std::fmt; use std::path::{Path, PathBuf}; use std::sync::{Arc, Mutex}; @@ -32,19 +33,21 @@ use std::sync::mpsc::Sender; use crate::build::PackageArg; use cargo::core::{PackageId, Target, TargetKind}; -use cargo::core::compiler::{Context, Kind, Unit}; +use cargo::core::compiler::{CompileMode, Context, Kind, Unit}; use cargo::core::profiles::Profile; use cargo::util::{CargoResult, ProcessBuilder}; use cargo_metadata; use crate::lsp_data::parse_file_path; use url::Url; -use log::{log, trace}; +use log::{log, trace, error}; use crate::actions::progress::ProgressUpdate; use super::{BuildResult, Internals}; /// Main key type by which `Unit`s will be distinguished in the build plan. -crate type UnitKey = (PackageId, TargetKind); +/// In Target we're mostly interested in TargetKind (Lib, Bin, ...) and name +/// (e.g. we can have 2 binary targets with different names). +crate type UnitKey = (PackageId, Target, CompileMode); /// Holds the information how exactly the build will be performed for a given /// workspace with given, specified features. @@ -60,29 +63,27 @@ crate struct Plan { // An object for finding the package which a file belongs to and this inferring // a package argument. package_map: Option, - // The package argument used in the last Cargo build. - prev_package_arg: Option, + /// Packages (names) for which this build plan was prepared. + /// Used to detect if the plan can reused when building certain packages. 
+ built_packages: HashSet, } impl Plan { crate fn new() -> Plan { + Self::for_packages(HashSet::new()) + } + + crate fn for_packages(pkgs: HashSet) -> Plan { Plan { units: HashMap::new(), dep_graph: HashMap::new(), rev_dep_graph: HashMap::new(), compiler_jobs: HashMap::new(), package_map: None, - prev_package_arg: None, + built_packages: pkgs, } } - crate fn clear(&mut self) { - self.units = HashMap::new(); - self.dep_graph = HashMap::new(); - self.rev_dep_graph = HashMap::new(); - self.compiler_jobs = HashMap::new(); - } - /// Returns whether a build plan has cached compiler invocations and dep /// graph so it's at all able to return a job queue via `prepare_work`. crate fn is_ready(&self) -> bool { @@ -92,9 +93,9 @@ impl Plan { /// Cache a given compiler invocation in `ProcessBuilder` for a given /// `PackageId` and `TargetKind` in `Target`, to be used when processing /// cached build plan. - crate fn cache_compiler_job(&mut self, id: &PackageId, target: &Target, cmd: &ProcessBuilder) { - let pkg_key = (id.clone(), target.kind().clone()); - self.compiler_jobs.insert(pkg_key, cmd.clone()); + crate fn cache_compiler_job(&mut self, id: &PackageId, target: &Target, mode: CompileMode, cmd: &ProcessBuilder) { + let unit_key = (id.clone(), target.clone(), mode); + self.compiler_jobs.insert(unit_key, cmd.clone()); } /// Emplace a given `Unit`, along with its `Unit` dependencies (recursively) @@ -172,17 +173,17 @@ impl Plan { /// never do work and always offload to Cargo in such case. /// Because of that, build scripts are checked separately and only other /// crate targets are checked with path prefixes. - fn fetch_dirty_units + fmt::Debug>(&self, files: &[T]) -> HashSet { + fn fetch_dirty_units>(&self, files: &[T]) -> HashSet { let mut result = HashSet::new(); let build_scripts: HashMap<&Path, UnitKey> = self.units .iter() - .filter(|&(&(_, ref kind), _)| *kind == TargetKind::CustomBuild) + .filter(|&(&(_, ref target, _), _)| *target.kind() == TargetKind::CustomBuild) .map(|(key, unit)| (unit.target.src_path(), key.clone())) .collect(); let other_targets: HashMap = self.units .iter() - .filter(|&(&(_, ref kind), _)| *kind != TargetKind::CustomBuild) + .filter(|&(&(_, ref target, _), _)| *target.kind() != TargetKind::CustomBuild) .map(|(key, unit)| { ( key.clone(), @@ -194,32 +195,38 @@ impl Plan { }) .collect(); - for modified in files { - if let Some(unit) = build_scripts.get(modified.as_ref()) { + for modified in files.iter().map(|x| x.as_ref()) { + if let Some(unit) = build_scripts.get(modified) { result.insert(unit.clone()); } else { - // Not a build script, so we associate a dirty package with a - // dirty file by finding longest (most specified) path prefix - let unit = other_targets.iter().max_by_key(|&(_, src_dir)| { - if !modified.as_ref().starts_with(src_dir) { - return 0; - } - modified - .as_ref() - .components() - .zip(src_dir.components()) - .take_while(|&(a, b)| a == b) + // Not a build script, so we associate a dirty file with a + // package by finding longest (most specified) path prefix. + let matching_prefix_components = |a: &Path, b: &Path| -> usize { + assert!(a.is_absolute() && b.is_absolute()); + a.components().zip(b.components()) + .skip(1) // Skip RootDir + .take_while(|&(x, y)| x == y) .count() - }); - match unit { - None => trace!( - "Modified file {:?} doesn't correspond to any package!", - modified - ), - Some(unit) => { - result.insert(unit.0.clone()); - } }; + // Since a package can correspond to many units (e.g. 
compiled + // as a regular binary or a test harness for unit tests), we + // collect every unit having the longest path prefix. + let max_matching_prefix = other_targets.values() + .map(|src_dir| matching_prefix_components(modified, src_dir)) + .max(); + + match max_matching_prefix { + Some(0) => error!("Modified file {} didn't correspond to any buildable unit!", + modified.display()), + Some(max) => { + let dirty_units = other_targets.iter() + .filter(|(_, dir)| max == matching_prefix_components(modified, dir)) + .map(|(unit, _)| unit); + + result.extend(dirty_units.cloned()); + } + None => {} // Possible that only build scripts were modified + } } } result @@ -312,18 +319,29 @@ impl Plan { self.package_map = Some(PackageMap::new(manifest_path)); } - let package_arg = self.package_map + if !self.is_ready() || requested_cargo { + return WorkStatus::NeedsCargo(PackageArg::Default); + } + + let dirty_packages = self.package_map .as_ref() .unwrap() - .compute_package_arg(modified); - let package_arg_changed = match self.prev_package_arg { - Some(ref ppa) => ppa != &package_arg, - None => true, - }; - self.prev_package_arg = Some(package_arg.clone()); + .compute_dirty_packages(modified); + + let needs_more_packages = dirty_packages + .difference(&self.built_packages) + .next() + .is_some(); + + let needed_packages = self.built_packages + .union(&dirty_packages) + .cloned() + .collect(); - if !self.is_ready() || requested_cargo || package_arg_changed { - return WorkStatus::NeedsCargo(package_arg); + // We modified a file from a packages, that are not included in the + // cached build plan - run Cargo to recreate the build plan including them + if needs_more_packages { + return WorkStatus::NeedsCargo(PackageArg::Packages(needed_packages)); } let dirties = self.fetch_dirty_units(modified); @@ -335,15 +353,15 @@ impl Plan { if dirties .iter() - .any(|&(_, ref kind)| *kind == TargetKind::CustomBuild) + .any(|&(_, ref target, _)| *target.kind() == TargetKind::CustomBuild) { - WorkStatus::NeedsCargo(package_arg) + WorkStatus::NeedsCargo(PackageArg::Packages(needed_packages)) } else { let graph = self.dirty_rev_dep_graph(&dirties); trace!("Constructed dirty rev dep graph: {:?}", graph); if graph.is_empty() { - return WorkStatus::NeedsCargo(package_arg); + return WorkStatus::NeedsCargo(PackageArg::Default); } let queue = self.topological_sort(&graph); @@ -362,9 +380,8 @@ impl Plan { // crates within the crate that depend on the error-ing one have never been built. // In that case we need to build from scratch so that everything is in our cache, or // we cope with the error. In the error case, jobs will be None. - match jobs { - None => WorkStatus::NeedsCargo(package_arg), + None => WorkStatus::NeedsCargo(PackageArg::Default), Some(jobs) => { assert!(!jobs.is_empty()); WorkStatus::Execute(JobQueue(jobs)) @@ -374,22 +391,19 @@ impl Plan { } } +#[derive(Debug)] crate enum WorkStatus { NeedsCargo(PackageArg), Execute(JobQueue), } -// The point of the PackageMap is to compute the minimal work for Cargo to do, -// given a list of changed files. That is, if things have changed all over the -// place we have to do a `--all` build, but if they've only changed in one -// package, then we can do `-p foo` which should mean less work. -// -// However, when we change package we throw away our build plan and must rebuild -// it, so we must be careful that compiler jobs we expect to exist will in fact -// exist. 
There is some wasted work too, but I don't think we can predict when -// we definitely need to do this and it should be quick compared to compilation. - -// Maps paths to packages. +/// Maps paths to packages. +/// +/// The point of the PackageMap is detect if additional packages need to be +/// included in the cached build plan. The cache can represent only a subset of +/// the entire workspace, hence why we need to detect if a package was modified +/// that's outside the cached build plan - if so, we need to recreate it, +/// including the new package. #[derive(Debug)] struct PackageMap { // A map from a manifest directory to the package name. @@ -406,7 +420,7 @@ impl PackageMap { } } - // Fine each package in the workspace and record the root directory and package name. + // Find each package in the workspace and record the root directory and package name. fn discover_package_paths(manifest_path: &Path) -> HashMap { trace!("read metadata {:?}", manifest_path); let metadata = match cargo_metadata::metadata(Some(manifest_path)) { @@ -425,19 +439,12 @@ impl PackageMap { .collect() } - // Compute the `-p` argument to pass to Cargo by examining the current dirty - // set of files and finding their package. - fn compute_package_arg + fmt::Debug>(&self, modified_files: &[T]) -> PackageArg { - let mut packages: Option> = modified_files + /// Given modified set of files, returns a set of corresponding dirty packages. + fn compute_dirty_packages + fmt::Debug>(&self, modified_files: &[T]) -> HashSet { + modified_files .iter() - .map(|p| self.map(p.as_ref())) - .collect(); - match packages { - Some(ref mut packages) if packages.len() == 1 => { - PackageArg::Package(packages.drain().next().unwrap()) - } - _ => PackageArg::All, - } + .filter_map(|p| self.map(p.as_ref())) + .collect() } // Map a file to the package which it belongs to. @@ -473,6 +480,34 @@ impl PackageMap { crate struct JobQueue(Vec); +/// Returns an immediately next argument to the one specified in a given +/// ProcessBuilder (or `None` if the searched or the next argument could not be found). +/// +/// This is useful for returning values for arguments of `--key ` format. +/// For example, if `[.., "--crate-name", "rls", ...]` arguments are specified, +/// then proc_arg(prc, "--crate-name") returns Some(&OsStr::new("rls")); +fn proc_argument_value>(prc: &ProcessBuilder, key: T) -> Option<&std::ffi::OsStr> { + let args = prc.get_args(); + let (idx, _) = args.iter().enumerate() + .find(|(_, arg)| arg.as_os_str() == key.as_ref())?; + + Some(args.get(idx + 1)?.as_os_str()) +} + +impl fmt::Debug for JobQueue { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "JobQueue: [")?; + for prog in self.0.iter().rev() { + let name = proc_argument_value(prog, "--crate-name").unwrap(); + let typ_ = proc_argument_value(prog, "--crate-type") + .unwrap_or_else(|| OsStr::new("")); + write!(f, "{:?} ({:?}), ", name, typ_); + } + write!(f, "]")?; + Ok(()) + } +} + impl JobQueue { crate fn dequeue(&mut self) -> Option { self.0.pop() @@ -516,16 +551,20 @@ impl JobQueue { .expect("cannot stringify job program"); args.insert(0, program.clone()); - // Send a window/progress notification. At this point we know the percentage - // started out of the entire cached build. - // FIXME. We could communicate the "program" being built here, but - // it seems window/progress notification should have message OR percentage. + // Send a window/progress notification. { - // divide by zero is avoided by earlier assert! 
- let percentage = compiler_messages.len() as f64 / self.0.len() as f64; - progress_sender - .send(ProgressUpdate::Percentage(percentage)) - .expect("Failed to send progress update"); + let crate_name = proc_argument_value(&job, "--crate-name") + .and_then(|x| x.to_str()); + let update = match crate_name { + Some(name) => ProgressUpdate::Message(name.to_owned()), + None => { + // divide by zero is avoided by earlier assert! + let percentage = compiler_messages.len() as f64 / self.0.len() as f64; + ProgressUpdate::Percentage(percentage) + } + }; + + progress_sender.send(update).expect("Failed to send progress update"); } match super::rustc::rustc( @@ -571,7 +610,7 @@ impl JobQueue { } fn key_from_unit(unit: &Unit<'_>) -> UnitKey { - (unit.pkg.package_id().clone(), unit.target.kind().clone()) + (unit.pkg.package_id().clone(), unit.target.clone(), unit.mode) } macro_rules! print_dep_graph { @@ -603,6 +642,7 @@ crate struct OwnedUnit { crate target: Target, crate profile: Profile, crate kind: Kind, + crate mode: CompileMode, } impl<'a> From<&'a Unit<'a>> for OwnedUnit { @@ -612,6 +652,7 @@ impl<'a> From<&'a Unit<'a>> for OwnedUnit { target: unit.target.clone(), profile: unit.profile, kind: unit.kind, + mode: unit.mode, } } } diff --git a/tests/support/mod.rs b/tests/support/mod.rs index 3ef9c62d258..9dc18605666 100644 --- a/tests/support/mod.rs +++ b/tests/support/mod.rs @@ -71,6 +71,29 @@ pub struct ExpectedMessage { contains: Vec, } +#[derive(Debug)] +pub enum MsgMatchError { + ParseError, + MissingJsonrpc, + BadJsonrpc, + MissingId, + BadId, + StringNotFound(String, String), +} + +impl<'a, 'b> ToString for MsgMatchError { + fn to_string(&self) -> String { + match self { + MsgMatchError::ParseError => "JSON parsing failed".into(), + MsgMatchError::MissingJsonrpc => "Missing jsonrpc field".into(), + MsgMatchError::BadJsonrpc => "Bad jsonrpc field".into(), + MsgMatchError::MissingId => "Missing id field".into(), + MsgMatchError::BadId => "Unexpected id".into(), + MsgMatchError::StringNotFound(n, h) => format!("Could not find `{}` in `{}`", n, h), + } + } +} + impl ExpectedMessage { pub fn new(id: Option) -> ExpectedMessage { ExpectedMessage { @@ -83,6 +106,36 @@ impl ExpectedMessage { self.contains.push(s.to_owned()); self } + + // Err is the expected message that was not found in the given string. 
+ pub fn try_match(&self, msg: &str) -> Result<(), MsgMatchError> { + use self::MsgMatchError::*; + + let values: serde_json::Value = serde_json::from_str(msg).map_err(|_| ParseError)?; + let jsonrpc = values.get("jsonrpc").ok_or(MissingJsonrpc)?; + if jsonrpc != "2.0" { + return Err(BadJsonrpc); + } + + if let Some(id) = self.id { + let values_id = values.get("id").ok_or(MissingId)?; + if id != values_id.as_u64().unwrap() { + return Err(BadId) + }; + } + + self.try_match_raw(msg) + } + + pub fn try_match_raw(&self, s: &str) -> Result<(), MsgMatchError> { + for c in &self.contains { + s + .find(c) + .ok_or(MsgMatchError::StringNotFound(c.to_owned(), s.to_owned())) + .map(|_| ())?; + } + Ok(()) + } } pub fn read_message(reader: &mut BufReader) -> io::Result { @@ -112,12 +165,12 @@ pub fn read_message(reader: &mut BufReader) -> io::Result { Ok(result) } +pub fn read_messages(reader: &mut BufReader, count: usize) -> Vec { + (0..count).map(|_| read_message(reader).unwrap()).collect() +} + pub fn expect_messages(reader: &mut BufReader, expected: &[&ExpectedMessage]) { - let mut results: Vec = Vec::new(); - while results.len() < expected.len() { - let msg = read_message(reader).unwrap(); - results.push(msg); - } + let results = read_messages(reader, expected.len()); println!( "expect_messages:\n results: {:#?},\n expected: {:#?}", @@ -126,30 +179,38 @@ pub fn expect_messages(reader: &mut BufReader, expected: &[&Expected ); assert_eq!(results.len(), expected.len()); for (found, expected) in results.iter().zip(expected.iter()) { - let values: serde_json::Value = serde_json::from_str(found).unwrap(); - assert!( - values - .get("jsonrpc") - .expect("Missing jsonrpc field") - .as_str() - .unwrap() == "2.0", - "Bad jsonrpc field" - ); - if let Some(id) = expected.id { - assert_eq!( - values - .get("id") - .expect("Missing id field") - .as_u64() - .unwrap(), - id, - "Unexpected id" - ); + if let Err(err) = expected.try_match(&found) { + panic!(err.to_string()); } - for c in &expected.contains { - found - .find(c) - .expect(&format!("Could not find `{}` in `{}`", c, found)); + } +} + +pub fn expect_messages_unordered(reader: &mut BufReader, expected: &[&ExpectedMessage]) { + let mut results = read_messages(reader, expected.len()); + let mut expected: Vec<&ExpectedMessage> = expected.to_owned(); + + println!( + "expect_messages_unordered:\n results: {:#?},\n expected: {:#?}", + results, + expected + ); + assert_eq!(results.len(), expected.len()); + + while !results.is_empty() && !expected.is_empty() { + let first = expected[0]; + + let opt = results.iter() + .map(|r| first.try_match(r)) + .enumerate() + .find(|(_, res)| res.is_ok()); + + match opt { + Some((idx, Ok(()))) => { + expected.remove(0); + results.remove(idx); + }, + Some((_, Err(err))) => panic!(err.to_string()), + None => panic!(format!("Could not find `{:?}` among `{:?}", first, results)) } } } @@ -232,6 +293,10 @@ impl RlsHandle { pub fn expect_messages(&mut self, expected: &[&ExpectedMessage]) { expect_messages(&mut self.stdout, expected); } + + pub fn expect_messages_unordered(&mut self, expected: &[&ExpectedMessage]) { + expect_messages_unordered(&mut self.stdout, expected); + } } #[derive(PartialEq,Clone)] diff --git a/tests/tests.rs b/tests/tests.rs index 29f321ecb13..519073c144a 100644 --- a/tests/tests.rs +++ b/tests/tests.rs @@ -58,7 +58,7 @@ fn cmd_test_infer_bin() { #[test] fn cmd_test_simple_workspace() { - timeout(Duration::from_secs(300), ||{ + timeout(Duration::from_secs(TIME_LIMIT_SECS), ||{ let p = 
project("simple_workspace") .file("Cargo.toml", r#" [workspace] @@ -144,3 +144,171 @@ fn cmd_test_simple_workspace() { rls.shutdown_exit(); }); } + +#[test] +fn changing_workspace_lib_retains_bin_diagnostics() { + timeout(Duration::from_secs(TIME_LIMIT_SECS), ||{ + let p = project("simple_workspace") + .file("Cargo.toml", r#" + [workspace] + members = [ + "library", + "binary", + ] + "#) + .file("library/Cargo.toml", r#" + [package] + name = "library" + version = "0.1.0" + authors = ["Example "] + "#) + .file("library/src/lib.rs", r#" + pub fn fetch_u32() -> u32 { + let unused = (); + 42 + } + #[cfg(test)] + mod test { + #[test] + fn my_test() { + let test_val: u32 = super::fetch_u32(); + } + } + "#) + .file("binary/Cargo.toml", r#" + [package] + name = "binary" + version = "0.1.0" + authors = ["Igor Matuszewski "] + + [dependencies] + library = { path = "../library" } + "#) + .file("binary/src/main.rs", r#" + extern crate library; + + fn main() { + let val: u32 = library::fetch_u32(); + } + "#) + .build(); + + let root_path = p.root(); + let rls_child = p.rls().spawn().unwrap(); + let mut rls = RlsHandle::new(rls_child); + + rls.request(0, "initialize", Some(json!({ + "rootPath": root_path, + "capabilities": {} + }))).unwrap(); + + rls.expect_messages(&[ + ExpectedMessage::new(Some(0)).expect_contains("capabilities"), + ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#"title":"Building""#), + ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#"title":"Building""#), + ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#"title":"Building""#), + ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#"title":"Building""#), + ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#"title":"Building""#), + ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#""done":true"#), + ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#"title":"Indexing""#), + ]); + rls.expect_messages_unordered(&[ + ExpectedMessage::new(None).expect_contains("publishDiagnostics").expect_contains("library/src/lib.rs") + .expect_contains("unused variable: `unused`") + .expect_contains("unused variable: `test_val`"), + ExpectedMessage::new(None).expect_contains("publishDiagnostics").expect_contains("binary/src/main.rs") + .expect_contains("unused variable: `val`"), + ]); + rls.expect_messages(&[ + ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#""done":true"#), + ]); + + rls.notify("textDocument/didChange", Some(json!({ + "contentChanges": [ + { + "range": { + "start": { + "line": 1, + "character": 38, + }, + "end": { + "line": 1, + "character": 41, + } + }, + "rangeLength": 3, + "text": "u64" + } + ], + "textDocument": { + "uri": format!("file://{}/library/src/lib.rs", root_path.as_path().display()), + "version": 0 + } + }))).unwrap(); + + rls.expect_messages(&[ + ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#"title":"Building""#), + ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#"title":"Building""#), + ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#"title":"Building""#), + ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#"title":"Building""#), + ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#"title":"Building""#), + ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#""done":true"#), + 
ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#"title":"Indexing""#), + ]); + rls.expect_messages_unordered(&[ + ExpectedMessage::new(None).expect_contains("publishDiagnostics").expect_contains("library/src/lib.rs") + .expect_contains("unused variable: `unused`") // Regular lib compiles + .expect_contains("expected u32, found u64"), // lib unit tests have compile errors + ExpectedMessage::new(None).expect_contains("publishDiagnostics").expect_contains("binary/src/main.rs") + .expect_contains("expected u32, found u64"), // bin depending on lib picks up type mismatch + ]); + rls.expect_messages(&[ + ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#"title":"Indexing""#).expect_contains(r#""done":true"#), + ]); + + rls.notify("textDocument/didChange", Some(json!({ + "contentChanges": [ + { + "range": { + "start": { + "line": 1, + "character": 38, + }, + "end": { + "line": 1, + "character": 41, + } + }, + "rangeLength": 3, + "text": "u32" + } + ], + "textDocument": { + "uri": format!("file://{}/library/src/lib.rs", root_path.as_path().display()), + "version": 1 + } + }))).unwrap(); + + rls.expect_messages(&[ + ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#"title":"Building""#), + ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#"title":"Building""#), + ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#"title":"Building""#), + ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#"title":"Building""#), + ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#"title":"Building""#), + ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#"title":"Building""#).expect_contains(r#""done":true"#), + ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#"title":"Indexing""#), + ]); + rls.expect_messages_unordered(&[ + ExpectedMessage::new(None).expect_contains("publishDiagnostics").expect_contains("library/src/lib.rs") + .expect_contains("unused variable: `unused`") + .expect_contains("unused variable: `test_val`"), + ExpectedMessage::new(None).expect_contains("publishDiagnostics").expect_contains("binary/src/main.rs") + .expect_contains("unused variable: `val`"), + ]); + rls.expect_messages(&[ + ExpectedMessage::new(None).expect_contains("progress").expect_contains(r#"title":"Indexing""#).expect_contains(r#""done":true"#), + ]); + + rls.shutdown_exit(); + }); +} \ No newline at end of file
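
Editor's note on the dirty-unit detection this patch introduces in `src/build/plan.rs`: a modified file is attributed to every cached unit whose source directory shares the longest path prefix with it, so a package's lib target and its unit-test harness both become dirty at once (which is what the new `changing_workspace_lib_retains_bin_diagnostics` test exercises). The sketch below is a minimal, self-contained illustration of that heuristic using only `std`; the unit names and directory layout are hypothetical and this is not the RLS implementation itself.

```rust
use std::collections::HashMap;
use std::path::{Path, PathBuf};

/// Number of leading path components shared by two absolute paths,
/// skipping the root component (mirrors `matching_prefix_components` in the patch).
fn matching_prefix_components(a: &Path, b: &Path) -> usize {
    assert!(a.is_absolute() && b.is_absolute());
    a.components()
        .zip(b.components())
        .skip(1) // Skip RootDir
        .take_while(|&(x, y)| x == y)
        .count()
}

fn main() {
    // Hypothetical unit names mapped to their source directories.
    let units: HashMap<&str, PathBuf> = vec![
        ("library (lib)", PathBuf::from("/ws/library/src")),
        ("library (test harness)", PathBuf::from("/ws/library/src")),
        ("binary (bin)", PathBuf::from("/ws/binary/src")),
    ]
    .into_iter()
    .collect();

    let modified = Path::new("/ws/library/src/lib.rs");

    // Every unit tying for the longest shared prefix is considered dirty;
    // the real code additionally treats a best match of 0 shared components
    // as "modified file doesn't correspond to any buildable unit".
    let max = units
        .values()
        .map(|dir| matching_prefix_components(modified, dir))
        .max()
        .unwrap_or(0);

    let dirty: Vec<_> = units
        .iter()
        .filter(|&(_, dir)| matching_prefix_components(modified, dir) == max)
        .map(|(name, _)| name)
        .collect();

    // Both library units are dirty; the binary is rebuilt only via the
    // reverse-dependency graph, not by this path match.
    println!("dirty units: {:?}", dirty);
}
```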
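A second small piece of the patch worth calling out is `proc_argument_value`, which pulls the value that follows a `--key` flag out of a cached rustc invocation; it backs both the new `Debug` impl for `JobQueue` and the crate-name progress notifications. The following sketch shows the same idea over a plain argument slice rather than cargo's `ProcessBuilder`; `argument_value` is a hypothetical stand-in for illustration, not the patch's API.

```rust
use std::ffi::{OsStr, OsString};

/// Returns the argument immediately following `key`, or `None` if `key` is
/// absent or is the last argument.
fn argument_value<'a>(args: &'a [OsString], key: &str) -> Option<&'a OsStr> {
    let idx = args.iter().position(|arg| arg == key)?;
    Some(args.get(idx + 1)?.as_os_str())
}

fn main() {
    let args: Vec<OsString> = ["--edition", "2018", "--crate-name", "rls", "--crate-type", "lib"]
        .iter()
        .map(|s| OsString::from(*s))
        .collect();

    // The crate name is what the patch reports in window/progress updates.
    assert_eq!(argument_value(&args, "--crate-name"), Some(OsStr::new("rls")));
    // Missing keys yield None, so callers can fall back to a percentage update.
    assert_eq!(argument_value(&args, "--verbose"), None);
    println!("crate: {:?}", argument_value(&args, "--crate-name"));
}
```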