From 2f2ec279fdc9fdcfe3d2a5d08b13daee171609c3 Mon Sep 17 00:00:00 2001 From: Igor Matuszewski Date: Wed, 18 Oct 2017 22:39:59 +0200 Subject: [PATCH 1/2] Support multiple crate types/versions --- benches/std_api_crate.rs | 18 +++--- src/analysis.rs | 24 ++++---- src/lib.rs | 119 ++++++++++++++++++++------------------- src/listings/mod.rs | 2 +- src/loader.rs | 72 +++++++++++++++-------- src/lowering.rs | 64 +++++++++++---------- src/raw.rs | 87 ++++++++++++++-------------- src/test/mod.rs | 12 +--- 8 files changed, 213 insertions(+), 185 deletions(-) diff --git a/benches/std_api_crate.rs b/benches/std_api_crate.rs index 7e766f9..95f4a0f 100644 --- a/benches/std_api_crate.rs +++ b/benches/std_api_crate.rs @@ -1,3 +1,11 @@ +// Copyright 2017 The RLS Project Developers. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + #![feature(test)] extern crate rls_analysis; @@ -27,19 +35,13 @@ impl AnalysisLoader for TestAnalysisLoader { AnalysisHost::new_with_loader(self.clone()) } - fn set_path_prefix(&self, _path_prefix: &Path) {} + fn set_path_prefix(&mut self, _path_prefix: &Path) {} fn abs_path_prefix(&self) -> Option { panic!(); } - fn iter_paths(&self, f: F) -> Vec - where - F: Fn(&Path) -> Vec, - { - let paths = &[&self.path]; - paths.iter().flat_map(|p| f(p).into_iter()).collect() - } + fn search_directories(&self) -> Vec { vec![self.path.clone()] } } lazy_static! { diff --git a/src/analysis.rs b/src/analysis.rs index 2b5558e..b78a303 100644 --- a/src/analysis.rs +++ b/src/analysis.rs @@ -11,14 +11,15 @@ use std::path::{Path, PathBuf}; use std::time::SystemTime; use {Id, Span}; -use raw::DefKind; +use raw::{CrateId, DefKind}; +/// This is the main database that contains all the collected symbol information, +/// such as definitions, their mapping between spans, hierarchy and so on, +/// organized in a per-crate fashion. #[derive(Debug)] pub struct Analysis { - // The primary crate will have its data passed directly, not via a file, so - // there is no path for it. Because of this key into the hashmap, this means - // we can only pass the data for one crate directly. - pub per_crate: HashMap, PerCrateAnalysis>, + /// Contains lowered data with global inter-crate `Id`s per each crate. 
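// Illustrative sketch only (not part of this patch): keying the map by the full
// `CrateId` (the re-exported `data::GlobalCrateId`) instead of an `Option<PathBuf>`
// lets two builds of the same crate name coexist. The struct-literal syntax and the
// `(1, 0)`-style disambiguator values below are assumptions; the patch itself only
// relies on the `name` and `disambiguator` fields.
//
//     let mut per_crate: HashMap<CrateId, PerCrateAnalysis> = HashMap::new();
//     let v1 = CrateId { name: "foo".to_owned(), disambiguator: (1, 0) };
//     let v2 = CrateId { name: "foo".to_owned(), disambiguator: (2, 0) };
//     per_crate.insert(v1, PerCrateAnalysis::new(SystemTime::now()));
//     per_crate.insert(v2, PerCrateAnalysis::new(SystemTime::now()));
//     assert_eq!(per_crate.len(), 2); // same name, two distinct crates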
+ pub per_crate: HashMap, pub doc_url_base: String, pub src_url_base: String, @@ -36,7 +37,6 @@ pub struct PerCrateAnalysis { pub globs: HashMap, pub impls: HashMap>, - pub name: String, pub root_id: Option, pub timestamp: SystemTime, } @@ -88,7 +88,6 @@ impl PerCrateAnalysis { ref_spans: HashMap::new(), globs: HashMap::new(), impls: HashMap::new(), - name: String::new(), root_id: None, timestamp, } @@ -105,20 +104,19 @@ impl Analysis { } } - pub fn timestamps(&self) -> HashMap { + pub fn timestamps(&self) -> HashMap { self.per_crate .iter() - .filter_map(|(s, pc)| s.as_ref().map(|s| (s.clone(), pc.timestamp))) + .map(|(id, c)| (id.clone(), c.timestamp.clone())) .collect() } - pub fn update(&mut self, per_crate: PerCrateAnalysis, path: Option) { - self.per_crate.insert(path, per_crate); + pub fn update(&mut self, crate_id: CrateId, per_crate: PerCrateAnalysis) { + self.per_crate.insert(crate_id, per_crate); } pub fn has_def(&self, id: Id) -> bool { - self.for_each_crate(|c| c.defs.get(&id).map(|_| ())) - .is_some() + self.per_crate.values().any(|c| c.defs.contains_key(&id)) } pub fn for_each_crate(&self, f: F) -> Option diff --git a/src/lib.rs b/src/lib.rs index 505eeed..6967398 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -27,8 +27,8 @@ mod test; pub use analysis::Def; use analysis::Analysis; -pub use raw::{name_space_for_def_kind, read_analysis_incremental, DefKind, Target}; -pub use loader::{AnalysisLoader, CargoAnalysisLoader}; +pub use raw::{name_space_for_def_kind, read_analysis_from_files, CrateId, DefKind}; +pub use loader::{AnalysisLoader, CargoAnalysisLoader, Target}; use std::collections::HashMap; use std::path::{Path, PathBuf}; @@ -39,8 +39,8 @@ use std::u64; #[derive(Debug)] pub struct AnalysisHost { analysis: Mutex>, - master_crate_map: Mutex>, - loader: L, + master_crate_map: Mutex>, + loader: Mutex, } pub type AResult = Result; @@ -72,10 +72,13 @@ impl SymbolResult { pub type Span = span::Span; +/// A common identifier for definitions, references etc. This is effectively a +/// `DefId` with globally unique crate number (instead of a compiler generated +/// crate-local number). #[derive(Copy, Clone, Eq, PartialEq, Debug, Hash, new)] pub struct Id(u64); -// Used to indicate a missing index in the Id. +/// Used to indicate a missing index in the Id. pub const NULL: Id = Id(u64::MAX); type Blacklist<'a> = &'a [&'static str]; @@ -95,10 +98,7 @@ impl AnalysisHost { AnalysisHost { analysis: Mutex::new(None), master_crate_map: Mutex::new(HashMap::new()), - loader: CargoAnalysisLoader { - path_prefix: Mutex::new(None), - target, - }, + loader: Mutex::new(CargoAnalysisLoader::new(target)), } } } @@ -108,7 +108,7 @@ impl AnalysisHost { Self { analysis: Mutex::new(None), master_crate_map: Mutex::new(HashMap::new()), - loader, + loader: Mutex::new(loader), } } @@ -117,20 +117,24 @@ impl AnalysisHost { /// passing in directly. 
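// Call-site sketch (not part of the patch): the only change visible to callers is
// that `reload_from_analysis` now takes any number of `data::Analysis` blobs at
// once. The `analyses` vector, `path_prefix` and `base_dir` values are assumed to
// be supplied by the caller.
//
//     let host = AnalysisHost::new(Target::Debug);
//     let analyses: Vec<data::Analysis> = vec![/* produced by the compiler */];
//     host.reload_from_analysis(analyses, &path_prefix, &base_dir, &[])?;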
pub fn reload_from_analysis( &self, - analysis: data::Analysis, + analysis: Vec, path_prefix: &Path, base_dir: &Path, blacklist: Blacklist, ) -> AResult<()> { self.reload_with_blacklist(path_prefix, base_dir, blacklist)?; + let crates: Vec<_> = analysis.into_iter() + .map(|analysis| raw::Crate::new(analysis, SystemTime::now())) + .collect(); + lowering::lower( - vec![raw::Crate::new(analysis, SystemTime::now(), None)], + crates, base_dir, self, - |host, per_crate, path| { + |host, per_crate, id| { let mut a = host.analysis.lock()?; - a.as_mut().unwrap().update(per_crate, path); + a.as_mut().unwrap().update(id, per_crate); Ok(()) }, ) @@ -152,29 +156,25 @@ impl AnalysisHost { base_dir, blacklist ); - let empty = { - let a = self.analysis.lock()?; - a.is_none() - }; - if empty || self.loader.needs_hard_reload(path_prefix) { + let empty = self.analysis.lock()?.is_none(); + if empty || self.loader.lock()?.needs_hard_reload(path_prefix) { return self.hard_reload_with_blacklist(path_prefix, base_dir, blacklist); } - let timestamps = { - let a = self.analysis.lock()?; - a.as_ref().unwrap().timestamps() + let timestamps = self.analysis.lock()?.as_ref().unwrap().timestamps(); + let raw_analysis = { + let loader = self.loader.lock()?; + read_analysis_from_files(&*loader, timestamps, blacklist) }; - let raw_analysis = read_analysis_incremental(&self.loader, timestamps, blacklist); - - lowering::lower(raw_analysis, base_dir, self, |host, per_crate, path| { + lowering::lower(raw_analysis, base_dir, self, |host, per_crate, id| { let mut a = host.analysis.lock()?; - a.as_mut().unwrap().update(per_crate, path); + a.as_mut().unwrap().update(id, per_crate); Ok(()) }) } - // Reloads the entire project's analysis data. + /// Reloads the entire project's analysis data. pub fn hard_reload(&self, path_prefix: &Path, base_dir: &Path) -> AResult<()> { self.hard_reload_with_blacklist(path_prefix, base_dir, &[]) } @@ -186,42 +186,42 @@ impl AnalysisHost { blacklist: Blacklist, ) -> AResult<()> { trace!("hard_reload {:?} {:?}", path_prefix, base_dir); - self.loader.set_path_prefix(path_prefix); - let raw_analysis = read_analysis_incremental(&self.loader, HashMap::new(), blacklist); - // We're going to create a dummy AnalysisHost that we will fill with data, // then once we're done, we'll swap its data into self. - let mut fresh_host = self.loader.fresh_host(); + let mut fresh_host = self.loader.lock()?.fresh_host(); fresh_host.analysis = Mutex::new(Some(Analysis::new())); - let lowering_result = lowering::lower( - raw_analysis, - base_dir, - &fresh_host, - |host, per_crate, path| { - host.analysis - .lock() - .unwrap() - .as_mut() - .unwrap() - .per_crate - .insert(path, per_crate); - Ok(()) - }, - ); - if let Err(s) = lowering_result { - let mut a = self.analysis.lock()?; - *a = None; - return Err(s); + { + let mut fresh_loader = fresh_host.loader.lock().unwrap(); + fresh_loader.set_path_prefix(path_prefix); // TODO: Needed? + + let raw_analysis = read_analysis_from_files(&*fresh_loader, + HashMap::new(), + blacklist); + lowering::lower(raw_analysis, base_dir, &fresh_host, |host, per_crate, id| { + let mut a = host.analysis.lock()?; + a.as_mut().unwrap().update(id, per_crate); + Ok(()) + })?; } - { - let mut mcm = self.master_crate_map.lock()?; - *mcm = fresh_host.master_crate_map.into_inner().unwrap(); + // To guarantee a consistent state and no corruption in case an error + // happens during reloading, we need to swap data with a dummy host in + // a single atomic step. 
We can't lock and swap every member at a time, + // as this can possibly lead to inconsistent state, but now this can possibly + // deadlock, which isn't that good. Ideally we should have guaranteed + // exclusive access to AnalysisHost as a whole to perform a reliable swap. + macro_rules! swap_mutex_fields { + ($($name:ident),*) => { + // First, we need exclusive access to every field before swapping + $(let mut $name = self.$name.lock()?;)* + // Then, we can swap every field + $(*$name = fresh_host.$name.into_inner().unwrap();)* + }; } - let mut a = self.analysis.lock()?; - *a = Some(fresh_host.analysis.into_inner().unwrap().unwrap()); + swap_mutex_fields!(analysis, master_crate_map, loader); + Ok(()) } @@ -275,7 +275,12 @@ impl AnalysisHost { pub fn def_roots(&self) -> AResult> { self.with_analysis(|a| { Some( - a.for_all_crates(|c| c.root_id.map(|id| vec![(id, c.name.clone())])), + a.per_crate + .iter() + .filter_map(|(crate_id, data)| { + data.root_id.map(|id| (id, crate_id.name.clone())) + }) + .collect() ) }) } @@ -448,7 +453,7 @@ impl AnalysisHost { // e.g., https://github.com/rust-lang/rust/blob/master/src/liballoc/string.rs#L261-L263 pub fn src_url(&self, span: &Span) -> AResult { // FIXME would be nice not to do this every time. - let path_prefix = &self.loader.abs_path_prefix(); + let path_prefix = self.loader.lock().unwrap().abs_path_prefix(); self.with_analysis(|a| { a.def_id_for_span(span).and_then(|id| { diff --git a/src/listings/mod.rs b/src/listings/mod.rs index 3cffb08..ee48f17 100644 --- a/src/listings/mod.rs +++ b/src/listings/mod.rs @@ -59,7 +59,7 @@ impl DirectoryListing { path: path.components() .map(|c| c.as_os_str().to_str().unwrap().to_owned()) .collect(), - files: files, + files, }) } } diff --git a/src/loader.rs b/src/loader.rs index 944a69d..15d348e 100644 --- a/src/loader.rs +++ b/src/loader.rs @@ -6,61 +6,67 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +//! Defines an `AnalysisLoader` trait, which allows to specify directories +//! from which save-analysis JSON files can be read. Also supplies a +//! default implementation `CargoAnalysisLoader` for Cargo-emitted save-analysis +//! files. + use std::ffi::OsStr; +use std::fmt; use std::path::{Path, PathBuf}; use std::process::Command; -use std::sync::Mutex; -use raw::Target; use AnalysisHost; #[derive(Debug)] pub struct CargoAnalysisLoader { - pub path_prefix: Mutex>, + pub path_prefix: Option, pub target: Target, } +impl CargoAnalysisLoader { + pub fn new(target: Target) -> CargoAnalysisLoader { + CargoAnalysisLoader { + path_prefix: None, + target, + } + } +} + +/// Allows to specify from where and which analysis files will be considered +/// when reloading data to lower. pub trait AnalysisLoader: Sized { fn needs_hard_reload(&self, path_prefix: &Path) -> bool; fn fresh_host(&self) -> AnalysisHost; - fn set_path_prefix(&self, path_prefix: &Path); + fn set_path_prefix(&mut self, path_prefix: &Path); fn abs_path_prefix(&self) -> Option; - fn iter_paths(&self, f: F) -> Vec - where - F: Fn(&Path) -> Vec; + /// Returns every directory in which analysis files are to be considered. 
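// Sketch of a minimal implementor (mirrors the `TestAnalysisLoader` used by the
// tests and benches in this patch). The `DirLoader` name is invented, and the
// `AnalysisHost<Self>` / `Vec<PathBuf>` / `Option<PathBuf>` generics are
// reconstructed here; the trait now only needs a flat list of directories to scan.
//
//     #[derive(Clone)]
//     struct DirLoader { path: PathBuf }
//
//     impl AnalysisLoader for DirLoader {
//         fn needs_hard_reload(&self, _: &Path) -> bool { true }
//         fn fresh_host(&self) -> AnalysisHost<Self> {
//             AnalysisHost::new_with_loader(self.clone())
//         }
//         fn set_path_prefix(&mut self, _: &Path) {}
//         fn abs_path_prefix(&self) -> Option<PathBuf> { None }
//         fn search_directories(&self) -> Vec<PathBuf> { vec![self.path.clone()] }
//     }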
+ fn search_directories(&self) -> Vec; } impl AnalysisLoader for CargoAnalysisLoader { fn needs_hard_reload(&self, path_prefix: &Path) -> bool { - let pp = self.path_prefix.lock().unwrap(); - pp.is_none() || pp.as_ref().unwrap() != path_prefix + self.path_prefix.as_ref().map_or(true, |p| p != path_prefix) } fn fresh_host(&self) -> AnalysisHost { - let pp = self.path_prefix.lock().unwrap(); AnalysisHost::new_with_loader(CargoAnalysisLoader { - path_prefix: Mutex::new(pp.clone()), - target: self.target, + path_prefix: self.path_prefix.clone(), + .. CargoAnalysisLoader::new(self.target) }) } - fn set_path_prefix(&self, path_prefix: &Path) { - let mut pp = self.path_prefix.lock().unwrap(); - *pp = Some(path_prefix.to_owned()) + fn set_path_prefix(&mut self, path_prefix: &Path) { + self.path_prefix = Some(path_prefix.to_owned()); } fn abs_path_prefix(&self) -> Option { - let p = self.path_prefix.lock().unwrap(); - p.as_ref() + self.path_prefix.as_ref() .map(|s| Path::new(s).canonicalize().unwrap().to_owned()) } - fn iter_paths(&self, f: F) -> Vec - where - F: Fn(&Path) -> Vec, - { - let path_prefix = self.path_prefix.lock().unwrap(); - let path_prefix = path_prefix.as_ref().unwrap(); + fn search_directories(&self) -> Vec { + let path_prefix = self.path_prefix.as_ref().unwrap(); let target = self.target.to_string(); // TODO sys_root_path allows to break out of 'sandbox' - is that Ok? @@ -82,12 +88,13 @@ impl AnalysisLoader for CargoAnalysisLoader { .join("rustlib") .join(&target_triple) .join("analysis"); - let paths = &[&libs_path, &deps_path, &principle_path]; - paths.iter().flat_map(|p| f(p).into_iter()).collect() + vec![libs_path, deps_path, principle_path] } } +// FIXME: This can fail when using a custom toolchain in rustup (often linked to +// `/$rust_repo/build/$target/stage2`) fn extract_target_triple(sys_root_path: &Path) -> String { // Extracts nightly-x86_64-pc-windows-msvc from // $HOME/.rustup/toolchains/nightly-x86_64-pc-windows-msvc @@ -123,6 +130,21 @@ fn sys_root_path() -> PathBuf { ) } +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum Target { + Release, + Debug, +} + +impl fmt::Display for Target { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + Target::Release => write!(f, "release"), + Target::Debug => write!(f, "debug"), + } + } +} + #[cfg(test)] mod tests { use std::path::Path; diff --git a/src/lowering.rs b/src/lowering.rs index 7504a69..2d18415 100644 --- a/src/lowering.rs +++ b/src/lowering.rs @@ -6,11 +6,12 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// For processing the raw save-analysis data from rustc into rustw's in-memory representation. +//! For processing the raw save-analysis data from rustc into the rls +//! in-memory representation. 
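// Shape of the entry point as driven from `lib.rs` after this patch (taken from the
// `reload_with_blacklist` call site): the per-crate callback now receives the
// owning `CrateId` instead of an `Option<PathBuf>`.
//
//     lowering::lower(raw_analysis, base_dir, self, |host, per_crate, id| {
//         let mut a = host.analysis.lock()?;
//         a.as_mut().unwrap().update(id, per_crate);
//         Ok(())
//     })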
use analysis::{Def, Glob, PerCrateAnalysis}; use data; -use raw::{self, RelationKind}; +use raw::{self, RelationKind, CrateId}; use {AResult, AnalysisHost, Id, Span, NULL}; use loader::AnalysisLoader; use util; @@ -31,7 +32,7 @@ pub fn lower( mut f: F, ) -> AResult<()> where - F: FnMut(&AnalysisHost, PerCrateAnalysis, Option) -> AResult<()>, + F: FnMut(&AnalysisHost, PerCrateAnalysis, CrateId) -> AResult<()>, L: AnalysisLoader, { let rss = util::get_resident().unwrap_or(0); @@ -40,19 +41,19 @@ where for c in raw_analysis { let t_start = Instant::now(); - let (per_crate, path) = CrateReader::read_crate(analysis, c, base_dir); + let (per_crate, id) = CrateReader::read_crate(analysis, c, base_dir); let time = t_start.elapsed(); info!( - "Lowering {:?} in {:.2}s", - path.as_ref().map(|p| p.display().to_string()), + "Lowering {} in {:.2}s", + format!("{} ({:?})", id.name, id.disambiguator), time.as_secs() as f64 + time.subsec_nanos() as f64 / 1_000_000_000.0 ); info!(" defs: {}", per_crate.defs.len()); info!(" refs: {}", per_crate.ref_spans.len()); info!(" globs: {}", per_crate.globs.len()); - f(analysis, per_crate, path)?; + f(analysis, per_crate, id)?; } let time = t_start.elapsed(); @@ -84,6 +85,9 @@ fn lower_span(raw_span: &raw::SpanData, base_dir: &Path) -> Span { ) } +/// Responsible for processing the raw `data::Analysis`, including translating +/// from local crate ids to global crate ids, and creating lowered +/// `PerCrateAnalysis`. struct CrateReader { /// This is effectively a map from local crate id -> global crate id, where /// local crate id are indices 0...external_crate_count. @@ -95,12 +99,13 @@ struct CrateReader { impl CrateReader { fn from_prelude( mut prelude: raw::CratePreludeData, - master_crate_map: &mut HashMap, + master_crate_map: &mut HashMap, base_dir: &Path, ) -> CrateReader { - fn fetch_crate_id(map: &mut HashMap, crate_name: &String) -> u32 { + fn fetch_crate_index(map: &mut HashMap, + id: data::GlobalCrateId) -> u32 { let next = map.len() as u32; - *map.entry(crate_name.clone()).or_insert(next) + *map.entry(id).or_insert(next) } // When reading a local crate and its external crates, we need to: // 1. Update a global crate id map if we encounter any new crate @@ -108,32 +113,33 @@ impl CrateReader { // map those when lowering symbols with local crate ids into global registry // It's worth noting, that we assume that local crate id is 0, whereas // the external crates will have num in 1..count contiguous range. 
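// Worked example (sketch): suppose the master map already holds { A -> 0, B -> 1 }
// and we lower a new crate C whose external crates are B (local num 1) and D
// (local num 2). C itself takes the next free global index 2, B keeps 1, and D
// gets 3, so C's local-to-global `crate_map` becomes [2, 1, 3], indexed by the
// compiler's local crate numbers 0..=2.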
- let crate_name = prelude.crate_id.name.clone(); - trace!("building crate map for {}", crate_name); - let id = fetch_crate_id(master_crate_map, &crate_name); - let mut crate_map = vec![id]; - trace!(" {} -> {}", crate_name, master_crate_map[&crate_name]); + let crate_id = prelude.crate_id; + trace!("building crate map for {}", crate_id.name); + let index = fetch_crate_index(master_crate_map, crate_id.clone()); + let mut crate_map = vec![index]; + trace!(" {} -> {}", crate_id.name, master_crate_map[&crate_id]); prelude.external_crates.sort_by(|a, b| a.num.cmp(&b.num)); for c in prelude.external_crates { assert!(c.num == crate_map.len() as u32); - let id = fetch_crate_id(master_crate_map, &c.id.name); - crate_map.push(id); - trace!(" {} -> {}", c.id.name, master_crate_map[&c.id.name]); + let index = fetch_crate_index(master_crate_map, c.id.clone()); + crate_map.push(index); + trace!(" {} -> {}", c.id.name, master_crate_map[&c.id]); } CrateReader { crate_map, base_dir: base_dir.to_owned(), - crate_name: crate_name, + crate_name: crate_id.name, } } + /// Lowers a given `raw::Crate` into `AnalysisHost`. fn read_crate( project_analysis: &AnalysisHost, krate: raw::Crate, base_dir: &Path, - ) -> (PerCrateAnalysis, Option) { + ) -> (PerCrateAnalysis, CrateId) { let reader = CrateReader::from_prelude( krate.analysis.prelude.unwrap(), &mut project_analysis.master_crate_map.lock().unwrap(), @@ -143,15 +149,12 @@ impl CrateReader { let mut per_crate = PerCrateAnalysis::new(krate.timestamp); let is_distro_crate = krate.analysis.config.distro_crate; - reader.read_defs(krate.analysis.defs, &mut per_crate, is_distro_crate); reader.read_imports(krate.analysis.imports, &mut per_crate, project_analysis); reader.read_refs(krate.analysis.refs, &mut per_crate, project_analysis); reader.read_impls(krate.analysis.relations, &mut per_crate, project_analysis); - per_crate.name = reader.crate_name; - - (per_crate, krate.path) + (per_crate, krate.id) } fn read_imports( @@ -165,7 +168,7 @@ impl CrateReader { if !i.value.is_empty() { // A glob import. let glob = Glob { value: i.value }; - // println!("record glob {:?} {:?}", span, glob); + trace!("record glob {:?} {:?}", span, glob); analysis.globs.insert(span, glob); } else if let Some(ref ref_id) = i.ref_id { // Import where we know the referred def. @@ -173,7 +176,7 @@ impl CrateReader { if def_id != NULL && !analysis.def_id_for_span.contains_key(&span) && (project_analysis.has_def(def_id) || analysis.defs.contains_key(&def_id)) { - // println!("record import ref {:?} {:?} {:?} {}", i, span, ref_id, def_id); + trace!("record import ref {:?} {:?} {:?} {}", i, span, ref_id, def_id); analysis.def_id_for_span.insert(span.clone(), def_id); analysis .ref_spans @@ -341,22 +344,23 @@ impl CrateReader { // } // } - /// Recreates resulting crate-local (u32, u32) id from compiler to a global - /// `u64` `Id`, mapping from a local to global crate id. + /// Recreates resulting crate-local (`u32`, `u32`) id from compiler + /// to a global `u64` `Id`, mapping from a local to global crate id. fn id_from_compiler_id(&self, id: &data::Id) -> Id { if id.krate == u32::MAX || id.index == u32::MAX { return NULL; } let krate = self.crate_map[id.krate as usize] as u64; - // Use global crate number for high order bits, then index for least significant bits. + // Use global crate number for high order bits, + // then index for least significant bits. 
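// Worked example of the packing below: global crate number 2 and compiler def
// index 7 become (2 << 32) | 7 = 0x0000_0002_0000_0007, i.e. the global crate id
// occupies the high 32 bits and the local def index the low 32 bits.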
Id((krate << 32) | (id.index as u64)) } } fn abs_ref_id( id: Id, - analysis: &mut PerCrateAnalysis, + analysis: &PerCrateAnalysis, project_analysis: &AnalysisHost, ) -> Option { if project_analysis.has_def(id) || analysis.defs.contains_key(&id) { diff --git a/src/raw.rs b/src/raw.rs index e893192..f73dd01 100644 --- a/src/raw.rs +++ b/src/raw.rs @@ -8,56 +8,48 @@ use {AnalysisLoader, Blacklist}; use listings::{DirectoryListing, ListingKind}; -pub use data::{CratePreludeData, Def, DefKind, Import, Ref, Relation, RelationKind, SigElement, - Signature, SpanData}; +pub use data::{CratePreludeData, Def, DefKind, GlobalCrateId as CrateId, Import, + Ref, Relation, RelationKind, SigElement, Signature, SpanData}; use data::Analysis; - use std::collections::HashMap; -use std::fmt; use std::fs::File; -use std::io::Read; -use std::path::{Path, PathBuf}; +use std::io::{self, Read}; +use std::path::Path; use std::time::{Instant, SystemTime}; -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub enum Target { - Release, - Debug, +#[derive(Debug)] +pub struct Crate { + pub id: CrateId, + pub analysis: Analysis, + pub timestamp: SystemTime, } -impl fmt::Display for Target { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Target::Release => write!(f, "release"), - Target::Debug => write!(f, "debug"), +impl Crate { + pub fn new(analysis: Analysis, timestamp: SystemTime) -> Crate { + Crate { + id: analysis.prelude.as_ref().unwrap().crate_id.clone(), + analysis, + timestamp } } } -#[derive(Debug, new)] -pub struct Crate { - pub analysis: Analysis, - pub timestamp: SystemTime, - pub path: Option, -} - -pub fn read_analysis_incremental( +/// Reads raw analysis data for non-blacklisted crates from files in directories +/// pointed by `loader`. +pub fn read_analysis_from_files( loader: &L, - timestamps: HashMap, + crate_timestamps: HashMap, crate_blacklist: Blacklist, ) -> Vec { - loader.iter_paths(|p| { - let t = Instant::now(); - - let mut result = vec![]; + let mut result = vec![]; - let listing = match DirectoryListing::from_path(p) { - Ok(l) => l, - Err(_) => { - return result; - } - }; + loader.search_directories() + .iter() + .inspect(|path| trace!("Considering analysis files at {}", path.display())) + .filter_map(|p| DirectoryListing::from_path(p).ok().map(|list| (p, list))) + .for_each(|(p, listing)| { + let t = Instant::now(); for l in listing.files { info!("Considering {:?}", l); @@ -67,11 +59,20 @@ pub fn read_analysis_incremental( } let path = p.join(&l.name); - let is_fresh = timestamps.get(&path).map_or(true, |t| time > t); - if is_fresh { - read_crate_data(&path) - .map(|a| result.push(Crate::new(a, *time, Some(path)))); - } + // TODO: Bring back path-based timestamps, so we can discard + // stale data before reading the file and attempting the + // deserialization, as it can take a considerate amount of time + // for big analysis data files. 
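// Example of the trade-off (sketch, with made-up times): a save-analysis file
// written at 10:00 for a crate whose recorded timestamp in `crate_timestamps` is
// 10:05 is still deserialised below, only to be found stale (`time > t` is false)
// and dropped; the path-keyed pre-check described in the TODO above would have
// skipped it before parsing.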
+ //let is_fresh = timestamps.get(&path).map_or(true, |t| time > t); + read_crate_data(&path).map(|analysis| { + let is_fresh = { + let id = &analysis.prelude.as_ref().unwrap().crate_id; + crate_timestamps.get(id).map_or(true, |t| time > t) + }; + if is_fresh { + result.push(Crate::new(analysis, *time)); + } + }); } } @@ -83,9 +84,9 @@ pub fn read_analysis_incremental( d.as_secs(), d.subsec_nanos() ); + }); - result - }) + result } fn ignore_data(file_name: &str, crate_blacklist: Blacklist) -> bool { @@ -93,13 +94,15 @@ fn ignore_data(file_name: &str, crate_blacklist: Blacklist) -> bool { .any(|name| file_name.starts_with(&format!("lib{}-", name))) } -fn read_file_contents(path: &Path) -> Result { +fn read_file_contents(path: &Path) -> io::Result { let mut file = File::open(&path)?; let mut buf = String::new(); file.read_to_string(&mut buf)?; Ok(buf) } +/// Attempts to read and deserialize `Analysis` data from a JSON file at `path`, +/// returns `Some(data)` on success. fn read_crate_data(path: &Path) -> Option { trace!("read_crate_data {:?}", path); let t = Instant::now(); diff --git a/src/test/mod.rs b/src/test/mod.rs index 9e009b5..9482fe0 100644 --- a/src/test/mod.rs +++ b/src/test/mod.rs @@ -25,19 +25,13 @@ impl AnalysisLoader for TestAnalysisLoader { AnalysisHost::new_with_loader(self.clone()) } - fn set_path_prefix(&self, _path_prefix: &Path) {} + fn set_path_prefix(&mut self, _path_prefix: &Path) {} fn abs_path_prefix(&self) -> Option { panic!(); } - fn iter_paths(&self, f: F) -> Vec - where - F: Fn(&Path) -> Vec, - { - let paths = &[&self.path]; - paths.iter().flat_map(|p| f(p).into_iter()).collect() - } + fn search_directories(&self) -> Vec { vec![self.path.clone()] } } #[test] @@ -64,7 +58,7 @@ fn doc_urls_resolve_correctly() { qualname.is_none() || def.qualname == qualname.unwrap() }) .collect(); - println!("{:#?}", defs); + println!("{}: {:#?}", type_, defs); assert_eq!(defs.len(), 1); assert_eq!(host.doc_url(&defs[0].span), Ok(url.into())); } From 08759d48db89219899ee29d52686014b403cf78b Mon Sep 17 00:00:00 2001 From: Igor Matuszewski Date: Fri, 3 Nov 2017 12:06:37 +0100 Subject: [PATCH 2/2] Bump version to 0.8 --- Cargo.lock | 2 +- Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 95629bd..97698c1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -24,7 +24,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "rls-analysis" -version = "0.7.2" +version = "0.8.0" dependencies = [ "derive-new 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", diff --git a/Cargo.toml b/Cargo.toml index d314a7b..7e7bfdc 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "rls-analysis" -version = "0.7.2" +version = "0.8.0" authors = ["Nick Cameron "] description = "Library for processing rustc's save-analysis data for the RLS" license = "Apache-2.0/MIT"