diff --git a/Cargo.lock b/Cargo.lock index 8fd365b8462b7..4c8ac69db5469 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2613,6 +2613,7 @@ dependencies = [ "serialize 0.0.0", "syntax 0.0.0", "syntax_pos 0.0.0", + "tempfile 3.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3374,6 +3375,7 @@ dependencies = [ "arena 0.0.0", "cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_data_structures 0.0.0", + "rustc_macros 0.1.0", "scoped-tls 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "serialize 0.0.0", "unicode-width 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", diff --git a/src/doc/rustc/src/command-line-arguments.md b/src/doc/rustc/src/command-line-arguments.md index d7e789b5a11f7..dfb40284ef6cf 100644 --- a/src/doc/rustc/src/command-line-arguments.md +++ b/src/doc/rustc/src/command-line-arguments.md @@ -86,6 +86,13 @@ This flag will set which lints should be set to the [deny level](lints/levels.ht This flag will set which lints should be set to the [forbid level](lints/levels.html#forbid). +## `-Z`: set unstable options + +This flag will allow you to set unstable options of rustc. In order to set multiple options, +the -Z flag can be used multiple times. For example: `rustc -Z verbose -Z time`. +Specifying options with -Z is only available on nightly. To view all available options +run: `rustc -Z help`. + ## `--cap-lints`: set the most restrictive lint level This flag lets you 'cap' lints, for more, [see here](lints/levels.html#capping-lints). diff --git a/src/libcore/cmp.rs b/src/libcore/cmp.rs index 807b35e1af10b..14908108fc5a6 100644 --- a/src/libcore/cmp.rs +++ b/src/libcore/cmp.rs @@ -286,13 +286,13 @@ pub struct AssertParamIsEq { _field: ::marker::PhantomData } #[derive(Clone, Copy, PartialEq, Debug, Hash)] #[stable(feature = "rust1", since = "1.0.0")] pub enum Ordering { - /// An ordering where a compared value is less [than another]. + /// An ordering where a compared value is less than another. #[stable(feature = "rust1", since = "1.0.0")] Less = -1, - /// An ordering where a compared value is equal [to another]. + /// An ordering where a compared value is equal to another. #[stable(feature = "rust1", since = "1.0.0")] Equal = 0, - /// An ordering where a compared value is greater [than another]. + /// An ordering where a compared value is greater than another. #[stable(feature = "rust1", since = "1.0.0")] Greater = 1, } diff --git a/src/libcore/ops/arith.rs b/src/libcore/ops/arith.rs index 0688a606591e4..8139305f5302d 100644 --- a/src/libcore/ops/arith.rs +++ b/src/libcore/ops/arith.rs @@ -220,21 +220,21 @@ sub_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 } /// // derive `Eq` and `PartialEq`. /// #[derive(Debug, Eq, PartialEq)] /// struct Rational { -/// nominator: usize, +/// numerator: usize, /// denominator: usize, /// } /// /// impl Rational { -/// fn new(nominator: usize, denominator: usize) -> Self { +/// fn new(numerator: usize, denominator: usize) -> Self { /// if denominator == 0 { /// panic!("Zero is an invalid denominator!"); /// } /// /// // Reduce to lowest terms by dividing by the greatest common /// // divisor. -/// let gcd = gcd(nominator, denominator); +/// let gcd = gcd(numerator, denominator); /// Rational { -/// nominator: nominator / gcd, +/// numerator: numerator / gcd, /// denominator: denominator / gcd, /// } /// } @@ -245,9 +245,9 @@ sub_impl! 
{ usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 } /// type Output = Self; /// /// fn mul(self, rhs: Self) -> Self { -/// let nominator = self.nominator * rhs.nominator; +/// let numerator = self.numerator * rhs.numerator; /// let denominator = self.denominator * rhs.denominator; -/// Rational::new(nominator, denominator) +/// Rational::new(numerator, denominator) /// } /// } /// @@ -340,21 +340,21 @@ mul_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 } /// // derive `Eq` and `PartialEq`. /// #[derive(Debug, Eq, PartialEq)] /// struct Rational { -/// nominator: usize, +/// numerator: usize, /// denominator: usize, /// } /// /// impl Rational { -/// fn new(nominator: usize, denominator: usize) -> Self { +/// fn new(numerator: usize, denominator: usize) -> Self { /// if denominator == 0 { /// panic!("Zero is an invalid denominator!"); /// } /// /// // Reduce to lowest terms by dividing by the greatest common /// // divisor. -/// let gcd = gcd(nominator, denominator); +/// let gcd = gcd(numerator, denominator); /// Rational { -/// nominator: nominator / gcd, +/// numerator: numerator / gcd, /// denominator: denominator / gcd, /// } /// } @@ -365,13 +365,13 @@ mul_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 } /// type Output = Self; /// /// fn div(self, rhs: Self) -> Self::Output { -/// if rhs.nominator == 0 { +/// if rhs.numerator == 0 { /// panic!("Cannot divide by zero-valued `Rational`!"); /// } /// -/// let nominator = self.nominator * rhs.denominator; -/// let denominator = self.denominator * rhs.nominator; -/// Rational::new(nominator, denominator) +/// let numerator = self.numerator * rhs.denominator; +/// let denominator = self.denominator * rhs.numerator; +/// Rational::new(numerator, denominator) /// } /// } /// diff --git a/src/libcore/task/wake.rs b/src/libcore/task/wake.rs index 979a0792608c1..006cbbb6ce6bd 100644 --- a/src/libcore/task/wake.rs +++ b/src/libcore/task/wake.rs @@ -72,10 +72,18 @@ pub struct RawWakerVTable { /// This function will be called when `wake` is called on the [`Waker`]. /// It must wake up the task associated with this [`RawWaker`]. /// - /// The implemention of this function must not consume the provided data - /// pointer. + /// The implementation of this function must make sure to release any + /// resources that are associated with this instance of a [`RawWaker`] and + /// associated task. wake: unsafe fn(*const ()), + /// This function will be called when `wake_by_ref` is called on the [`Waker`]. + /// It must wake up the task associated with this [`RawWaker`]. + /// + /// This function is similar to `wake`, but must not consume the provided data + /// pointer. + wake_by_ref: unsafe fn(*const ()), + /// This function gets called when a [`RawWaker`] gets dropped. /// /// The implementation of this function must make sure to release any @@ -85,8 +93,8 @@ pub struct RawWakerVTable { } impl RawWakerVTable { - /// Creates a new `RawWakerVTable` from the provided `clone`, `wake`, and - /// `drop` functions. + /// Creates a new `RawWakerVTable` from the provided `clone`, `wake`, + /// `wake_by_ref`, and `drop` functions. /// /// # `clone` /// @@ -103,7 +111,16 @@ impl RawWakerVTable { /// This function will be called when `wake` is called on the [`Waker`]. /// It must wake up the task associated with this [`RawWaker`]. 
/// - /// The implemention of this function must not consume the provided data + /// The implementation of this function must make sure to release any + /// resources that are associated with this instance of a [`RawWaker`] and + /// associated task. + /// + /// # `wake_by_ref` + /// + /// This function will be called when `wake_by_ref` is called on the [`Waker`]. + /// It must wake up the task associated with this [`RawWaker`]. + /// + /// This function is similar to `wake`, but must not consume the provided data /// pointer. /// /// # `drop` @@ -120,11 +137,13 @@ impl RawWakerVTable { pub const fn new( clone: unsafe fn(*const ()) -> RawWaker, wake: unsafe fn(*const ()), + wake_by_ref: unsafe fn(*const ()), drop: unsafe fn(*const ()), ) -> Self { Self { clone, wake, + wake_by_ref, drop, } } @@ -187,14 +206,33 @@ unsafe impl Sync for Waker {} impl Waker { /// Wake up the task associated with this `Waker`. #[inline] - pub fn wake(&self) { + pub fn wake(self) { // The actual wakeup call is delegated through a virtual function call // to the implementation which is defined by the executor. + let wake = self.waker.vtable.wake; + let data = self.waker.data; - // SAFETY: This is safe because `Waker::new_unchecked` is the only way + // Don't call `drop` -- the waker will be consumed by `wake`. + crate::mem::forget(self); + + // SAFETY: This is safe because `Waker::from_raw` is the only way // to initialize `wake` and `data` requiring the user to acknowledge // that the contract of `RawWaker` is upheld. - unsafe { (self.waker.vtable.wake)(self.waker.data) } + unsafe { (wake)(data) }; + } + + /// Wake up the task associated with this `Waker` without consuming the `Waker`. + /// + /// This is similar to `wake`, but may be slightly less efficient in the case + /// where an owned `Waker` is available. This method should be preferred to + /// calling `waker.clone().wake()`. + #[inline] + pub fn wake_by_ref(&self) { + // The actual wakeup call is delegated through a virtual function call + // to the implementation which is defined by the executor. + + // SAFETY: see `wake` + unsafe { (self.waker.vtable.wake_by_ref)(self.waker.data) } } /// Returns `true` if this `Waker` and another `Waker` have awoken the same task. @@ -215,7 +253,7 @@ impl Waker { /// in [`RawWaker`]'s and [`RawWakerVTable`]'s documentation is not upheld. /// Therefore this method is unsafe. #[inline] - pub unsafe fn new_unchecked(waker: RawWaker) -> Waker { + pub unsafe fn from_raw(waker: RawWaker) -> Waker { Waker { waker, } @@ -226,7 +264,7 @@ impl Clone for Waker { #[inline] fn clone(&self) -> Self { Waker { - // SAFETY: This is safe because `Waker::new_unchecked` is the only way + // SAFETY: This is safe because `Waker::from_raw` is the only way // to initialize `clone` and `data` requiring the user to acknowledge // that the contract of [`RawWaker`] is upheld. waker: unsafe { (self.waker.vtable.clone)(self.waker.data) }, @@ -237,7 +275,7 @@ impl Clone for Waker { impl Drop for Waker { #[inline] fn drop(&mut self) { - // SAFETY: This is safe because `Waker::new_unchecked` is the only way + // SAFETY: This is safe because `Waker::from_raw` is the only way // to initialize `drop` and `data` requiring the user to acknowledge // that the contract of `RawWaker` is upheld. 
unsafe { (self.waker.vtable.drop)(self.waker.data) } diff --git a/src/librustc/hir/print.rs b/src/librustc/hir/print.rs index 0a65473de8ff1..d1020a2d151d4 100644 --- a/src/librustc/hir/print.rs +++ b/src/librustc/hir/print.rs @@ -622,7 +622,7 @@ impl<'a> State<'a> { } hir::ItemKind::GlobalAsm(ref ga) => { self.head(visibility_qualified(&item.vis, "global asm"))?; - self.s.word(ga.asm.as_str().get())?; + self.s.word(ga.asm.as_str().to_string())?; self.end()? } hir::ItemKind::Ty(ref ty, ref generics) => { @@ -1591,7 +1591,7 @@ impl<'a> State<'a> { if ident.is_raw_guess() { self.s.word(format!("r#{}", ident.name))?; } else { - self.s.word(ident.as_str().get())?; + self.s.word(ident.as_str().to_string())?; } self.ann.post(self, AnnNode::Name(&ident.name)) } @@ -1998,7 +1998,7 @@ impl<'a> State<'a> { self.commasep(Inconsistent, &decl.inputs, |s, ty| { s.ibox(indent_unit)?; if let Some(arg_name) = arg_names.get(i) { - s.s.word(arg_name.as_str().get())?; + s.s.word(arg_name.as_str().to_string())?; s.s.word(":")?; s.s.space()?; } else if let Some(body_id) = body_id { diff --git a/src/librustc/middle/cstore.rs b/src/librustc/middle/cstore.rs index e4890977c9bd6..d22de6c647699 100644 --- a/src/librustc/middle/cstore.rs +++ b/src/librustc/middle/cstore.rs @@ -199,6 +199,7 @@ pub trait CrateStore { // "queries" used in resolve that aren't tracked for incremental compilation fn crate_name_untracked(&self, cnum: CrateNum) -> Symbol; + fn crate_is_private_dep_untracked(&self, cnum: CrateNum) -> bool; fn crate_disambiguator_untracked(&self, cnum: CrateNum) -> CrateDisambiguator; fn crate_hash_untracked(&self, cnum: CrateNum) -> Svh; fn extern_mod_stmt_cnum_untracked(&self, emod_id: ast::NodeId) -> Option; diff --git a/src/librustc/middle/lib_features.rs b/src/librustc/middle/lib_features.rs index ee8dd9e58b5ff..e79ef8bfc6fb4 100644 --- a/src/librustc/middle/lib_features.rs +++ b/src/librustc/middle/lib_features.rs @@ -8,7 +8,7 @@ use crate::ty::TyCtxt; use crate::hir::intravisit::{self, NestedVisitorMap, Visitor}; use syntax::symbol::Symbol; use syntax::ast::{Attribute, MetaItem, MetaItemKind}; -use syntax_pos::Span; +use syntax_pos::{Span, symbols}; use rustc_data_structures::fx::{FxHashSet, FxHashMap}; use rustc_macros::HashStable; use errors::DiagnosticId; @@ -51,12 +51,12 @@ impl<'a, 'tcx> LibFeatureCollector<'a, 'tcx> { } fn extract(&self, attr: &Attribute) -> Option<(Symbol, Option, Span)> { - let stab_attrs = vec!["stable", "unstable", "rustc_const_unstable"]; + let stab_attrs = [symbols::stable, symbols::unstable, symbols::rustc_const_unstable]; // Find a stability attribute (i.e., `#[stable (..)]`, `#[unstable (..)]`, // `#[rustc_const_unstable (..)]`). if let Some(stab_attr) = stab_attrs.iter().find(|stab_attr| { - attr.check_name(stab_attr) + attr.check_name(**stab_attr) }) { let meta_item = attr.meta(); if let Some(MetaItem { node: MetaItemKind::List(ref metas), .. }) = meta_item { diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index 988e4a9ff23a2..f95b63344fc58 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -268,22 +268,29 @@ impl OutputTypes { // DO NOT switch BTreeMap or BTreeSet out for an unsorted container type! That // would break dependency tracking for command-line arguments. 
#[derive(Clone, Hash)] -pub struct Externs(BTreeMap>>); +pub struct Externs(BTreeMap); + +#[derive(Clone, Hash, Eq, PartialEq, Ord, PartialOrd, Debug, Default)] +pub struct ExternEntry { + pub locations: BTreeSet>, + pub is_private_dep: bool +} impl Externs { - pub fn new(data: BTreeMap>>) -> Externs { + pub fn new(data: BTreeMap) -> Externs { Externs(data) } - pub fn get(&self, key: &str) -> Option<&BTreeSet>> { + pub fn get(&self, key: &str) -> Option<&ExternEntry> { self.0.get(key) } - pub fn iter<'a>(&'a self) -> BTreeMapIter<'a, String, BTreeSet>> { + pub fn iter<'a>(&'a self) -> BTreeMapIter<'a, String, ExternEntry> { self.0.iter() } } + macro_rules! hash_option { ($opt_name:ident, $opt_expr:expr, $sub_hashes:expr, [UNTRACKED]) => ({}); ($opt_name:ident, $opt_expr:expr, $sub_hashes:expr, [TRACKED]) => ({ @@ -412,10 +419,6 @@ top_level_options!( remap_path_prefix: Vec<(PathBuf, PathBuf)> [UNTRACKED], edition: Edition [TRACKED], - - // The list of crates to consider private when - // checking leaked private dependency types in public interfaces - extern_private: Vec [TRACKED], } ); @@ -618,7 +621,6 @@ impl Default for Options { cli_forced_thinlto_off: false, remap_path_prefix: Vec::new(), edition: DEFAULT_EDITION, - extern_private: Vec::new() } } } @@ -2290,10 +2292,14 @@ pub fn build_session_options_and_crate_config( ) } - let extern_private = matches.opt_strs("extern-private"); + // We start out with a Vec<(Option, bool)>>, + // and later convert it into a BTreeSet<(Option, bool)> + // This allows to modify entries in-place to set their correct + // 'public' value + let mut externs: BTreeMap = BTreeMap::new(); + for (arg, private) in matches.opt_strs("extern").into_iter().map(|v| (v, false)) + .chain(matches.opt_strs("extern-private").into_iter().map(|v| (v, true))) { - let mut externs: BTreeMap<_, BTreeSet<_>> = BTreeMap::new(); - for arg in matches.opt_strs("extern").into_iter().chain(matches.opt_strs("extern-private")) { let mut parts = arg.splitn(2, '='); let name = parts.next().unwrap_or_else(|| early_error(error_format, "--extern value must not be empty")); @@ -2306,10 +2312,17 @@ pub fn build_session_options_and_crate_config( ); }; - externs + let entry = externs .entry(name.to_owned()) - .or_default() - .insert(location); + .or_default(); + + + entry.locations.insert(location.clone()); + + // Crates start out being not private, + // and go to being private if we see an '--extern-private' + // flag + entry.is_private_dep |= private; } let crate_name = matches.opt_str("crate-name"); @@ -2361,7 +2374,6 @@ pub fn build_session_options_and_crate_config( cli_forced_thinlto_off: disable_thinlto, remap_path_prefix, edition, - extern_private }, cfg, ) @@ -2625,7 +2637,7 @@ mod tests { build_session_options_and_crate_config, to_crate_config }; - use crate::session::config::{LtoCli, LinkerPluginLto}; + use crate::session::config::{LtoCli, LinkerPluginLto, ExternEntry}; use crate::session::build_session; use crate::session::search_paths::SearchPath; use std::collections::{BTreeMap, BTreeSet}; @@ -2638,6 +2650,19 @@ mod tests { use syntax; use super::Options; + impl ExternEntry { + fn new_public, + I: IntoIterator>>(locations: I) -> ExternEntry { + let locations: BTreeSet<_> = locations.into_iter().map(|o| o.map(|s| s.into())) + .collect(); + + ExternEntry { + locations, + is_private_dep: false + } + } + } + fn optgroups() -> getopts::Options { let mut opts = getopts::Options::new(); for group in super::rustc_optgroups() { @@ -2650,10 +2675,6 @@ mod tests { 
BTreeMap::from_iter(entries.into_iter()) } - fn mk_set(entries: Vec) -> BTreeSet { - BTreeSet::from_iter(entries.into_iter()) - } - // When the user supplies --test we should implicitly supply --cfg test #[test] fn test_switch_implies_cfg_test() { @@ -2771,33 +2792,33 @@ mod tests { v1.externs = Externs::new(mk_map(vec![ ( String::from("a"), - mk_set(vec![Some(String::from("b")), Some(String::from("c"))]), + ExternEntry::new_public(vec![Some("b"), Some("c")]) ), ( String::from("d"), - mk_set(vec![Some(String::from("e")), Some(String::from("f"))]), + ExternEntry::new_public(vec![Some("e"), Some("f")]) ), ])); v2.externs = Externs::new(mk_map(vec![ ( String::from("d"), - mk_set(vec![Some(String::from("e")), Some(String::from("f"))]), + ExternEntry::new_public(vec![Some("e"), Some("f")]) ), ( String::from("a"), - mk_set(vec![Some(String::from("b")), Some(String::from("c"))]), + ExternEntry::new_public(vec![Some("b"), Some("c")]) ), ])); v3.externs = Externs::new(mk_map(vec![ ( String::from("a"), - mk_set(vec![Some(String::from("b")), Some(String::from("c"))]), + ExternEntry::new_public(vec![Some("b"), Some("c")]) ), ( String::from("d"), - mk_set(vec![Some(String::from("f")), Some(String::from("e"))]), + ExternEntry::new_public(vec![Some("f"), Some("e")]) ), ])); diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index aa5610739fd6d..12319315d418d 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -1388,6 +1388,16 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } + /// Returns whether or not the crate with CrateNum 'cnum' + /// is marked as a private dependency + pub fn is_private_dep(self, cnum: CrateNum) -> bool { + if cnum == LOCAL_CRATE { + false + } else { + self.cstore.crate_is_private_dep_untracked(cnum) + } + } + #[inline] pub fn def_path_hash(self, def_id: DefId) -> hir_map::DefPathHash { if def_id.is_local() { diff --git a/src/librustc/ty/layout.rs b/src/librustc/ty/layout.rs index e01b50113b9fe..fd1d3a91ede1f 100644 --- a/src/librustc/ty/layout.rs +++ b/src/librustc/ty/layout.rs @@ -920,6 +920,7 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { niche_variants, niche_start, }, + discr_index: 0, variants: st, }, fields: FieldPlacement::Arbitrary { @@ -1142,6 +1143,7 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { variants: Variants::Multiple { discr: tag, discr_kind: DiscriminantKind::Tag, + discr_index: 0, variants: layout_variants, }, fields: FieldPlacement::Arbitrary { @@ -1884,10 +1886,12 @@ impl<'a> HashStable> for Variants { Multiple { ref discr, ref discr_kind, + discr_index, ref variants, } => { discr.hash_stable(hcx, hasher); discr_kind.hash_stable(hcx, hasher); + discr_index.hash_stable(hcx, hasher); variants.hash_stable(hcx, hasher); } } diff --git a/src/librustc_codegen_llvm/back/archive.rs b/src/librustc_codegen_llvm/back/archive.rs index 3fb9d4b5b776b..e0e26e9af2537 100644 --- a/src/librustc_codegen_llvm/back/archive.rs +++ b/src/librustc_codegen_llvm/back/archive.rs @@ -7,14 +7,13 @@ use std::path::{Path, PathBuf}; use std::ptr; use std::str; -use crate::back::bytecode::RLIB_BYTECODE_EXTENSION; use crate::llvm::archive_ro::{ArchiveRO, Child}; use crate::llvm::{self, ArchiveKind}; -use crate::metadata::METADATA_FILENAME; -use rustc_codegen_ssa::back::archive::find_library; +use rustc_codegen_ssa::{METADATA_FILENAME, RLIB_BYTECODE_EXTENSION}; +use rustc_codegen_ssa::back::archive::{ArchiveBuilder, find_library}; use rustc::session::Session; -pub struct ArchiveConfig<'a> { +struct ArchiveConfig<'a> { pub 
sess: &'a Session, pub dst: PathBuf, pub src: Option, @@ -23,7 +22,7 @@ pub struct ArchiveConfig<'a> { /// Helper for adding many files to an archive. #[must_use = "must call build() to finish building the archive"] -pub struct ArchiveBuilder<'a> { +pub struct LlvmArchiveBuilder<'a> { config: ArchiveConfig<'a>, removals: Vec, additions: Vec, @@ -49,11 +48,26 @@ fn is_relevant_child(c: &Child<'_>) -> bool { } } -impl<'a> ArchiveBuilder<'a> { +fn archive_config<'a>(sess: &'a Session, + output: &Path, + input: Option<&Path>) -> ArchiveConfig<'a> { + use rustc_codegen_ssa::back::link::archive_search_paths; + ArchiveConfig { + sess, + dst: output.to_path_buf(), + src: input.map(|p| p.to_path_buf()), + lib_search_paths: archive_search_paths(sess), + } +} + +impl<'a> ArchiveBuilder<'a> for LlvmArchiveBuilder<'a> { /// Creates a new static archive, ready for modifying the archive specified /// by `config`. - pub fn new(config: ArchiveConfig<'a>) -> ArchiveBuilder<'a> { - ArchiveBuilder { + fn new(sess: &'a Session, + output: &Path, + input: Option<&Path>) -> LlvmArchiveBuilder<'a> { + let config = archive_config(sess, output, input); + LlvmArchiveBuilder { config, removals: Vec::new(), additions: Vec::new(), @@ -63,12 +77,12 @@ impl<'a> ArchiveBuilder<'a> { } /// Removes a file from this archive - pub fn remove_file(&mut self, file: &str) { + fn remove_file(&mut self, file: &str) { self.removals.push(file.to_string()); } /// Lists all files in an archive - pub fn src_files(&mut self) -> Vec { + fn src_files(&mut self) -> Vec { if self.src_archive().is_none() { return Vec::new() } @@ -84,18 +98,9 @@ impl<'a> ArchiveBuilder<'a> { .collect() } - fn src_archive(&mut self) -> Option<&ArchiveRO> { - if let Some(ref a) = self.src_archive { - return a.as_ref() - } - let src = self.config.src.as_ref()?; - self.src_archive = Some(ArchiveRO::open(src).ok()); - self.src_archive.as_ref().unwrap().as_ref() - } - /// Adds all of the contents of a native library to this archive. This will /// search in the relevant locations for a library named `name`. - pub fn add_native_library(&mut self, name: &str) { + fn add_native_library(&mut self, name: &str) { let location = find_library(name, &self.config.lib_search_paths, self.config.sess); self.add_archive(&location, |_| false).unwrap_or_else(|e| { @@ -109,7 +114,7 @@ impl<'a> ArchiveBuilder<'a> { /// /// This ignores adding the bytecode from the rlib, and if LTO is enabled /// then the object file also isn't added. - pub fn add_rlib(&mut self, + fn add_rlib(&mut self, rlib: &Path, name: &str, lto: bool, @@ -141,23 +146,8 @@ impl<'a> ArchiveBuilder<'a> { }) } - fn add_archive(&mut self, archive: &Path, skip: F) - -> io::Result<()> - where F: FnMut(&str) -> bool + 'static - { - let archive = match ArchiveRO::open(archive) { - Ok(ar) => ar, - Err(e) => return Err(io::Error::new(io::ErrorKind::Other, e)), - }; - self.additions.push(Addition::Archive { - archive, - skip: Box::new(skip), - }); - Ok(()) - } - /// Adds an arbitrary file to this archive - pub fn add_file(&mut self, file: &Path) { + fn add_file(&mut self, file: &Path) { let name = file.file_name().unwrap().to_str().unwrap(); self.additions.push(Addition::File { path: file.to_path_buf(), @@ -167,13 +157,13 @@ impl<'a> ArchiveBuilder<'a> { /// Indicate that the next call to `build` should update all symbols in /// the archive (equivalent to running 'ar s' over it). 
- pub fn update_symbols(&mut self) { + fn update_symbols(&mut self) { self.should_update_symbols = true; } /// Combine the provided files, rlibs, and native libraries into a single /// `Archive`. - pub fn build(&mut self) { + fn build(mut self) { let kind = self.llvm_archive_kind().unwrap_or_else(|kind| self.config.sess.fatal(&format!("Don't know how to build archive of type: {}", kind))); @@ -182,6 +172,32 @@ impl<'a> ArchiveBuilder<'a> { } } +} + +impl<'a> LlvmArchiveBuilder<'a> { + fn src_archive(&mut self) -> Option<&ArchiveRO> { + if let Some(ref a) = self.src_archive { + return a.as_ref() + } + let src = self.config.src.as_ref()?; + self.src_archive = Some(ArchiveRO::open(src).ok()); + self.src_archive.as_ref().unwrap().as_ref() + } + + fn add_archive(&mut self, archive: &Path, skip: F) + -> io::Result<()> + where F: FnMut(&str) -> bool + 'static + { + let archive = match ArchiveRO::open(archive) { + Ok(ar) => ar, + Err(e) => return Err(io::Error::new(io::ErrorKind::Other, e)), + }; + self.additions.push(Addition::Archive { + archive, + skip: Box::new(skip), + }); + Ok(()) + } fn llvm_archive_kind(&self) -> Result { let kind = &*self.config.sess.target.target.options.archive_format; diff --git a/src/librustc_codegen_llvm/back/bytecode.rs b/src/librustc_codegen_llvm/back/bytecode.rs index 8b288c45336b7..397cdecbb6f77 100644 --- a/src/librustc_codegen_llvm/back/bytecode.rs +++ b/src/librustc_codegen_llvm/back/bytecode.rs @@ -37,8 +37,6 @@ pub const RLIB_BYTECODE_OBJECT_MAGIC: &[u8] = b"RUST_OBJECT"; // The version number this compiler will write to bytecode objects in rlibs pub const RLIB_BYTECODE_OBJECT_VERSION: u8 = 2; -pub const RLIB_BYTECODE_EXTENSION: &str = "bc.z"; - pub fn encode(identifier: &str, bytecode: &[u8]) -> Vec { let mut encoded = Vec::new(); diff --git a/src/librustc_codegen_llvm/back/link.rs b/src/librustc_codegen_llvm/back/link.rs deleted file mode 100644 index 19419a72b94dd..0000000000000 --- a/src/librustc_codegen_llvm/back/link.rs +++ /dev/null @@ -1,1494 +0,0 @@ -use super::archive::{ArchiveBuilder, ArchiveConfig}; -use super::bytecode::RLIB_BYTECODE_EXTENSION; -use super::rpath::RPathConfig; -use super::rpath; -use crate::back::wasm; -use crate::metadata::METADATA_FILENAME; -use crate::context::get_reloc_model; -use crate::llvm; -use rustc_codegen_ssa::back::linker::Linker; -use rustc_codegen_ssa::back::link::{remove, ignored_for_lto, each_linked_rlib, linker_and_flavor, - get_linker}; -use rustc_codegen_ssa::back::command::Command; -use rustc::session::config::{self, DebugInfo, OutputFilenames, OutputType, PrintRequest}; -use rustc::session::config::{RUST_CGU_EXT, Lto, Sanitizer}; -use rustc::session::filesearch; -use rustc::session::search_paths::PathKind; -use rustc::session::Session; -use rustc::middle::cstore::{NativeLibrary, NativeLibraryKind}; -use rustc::middle::dependency_format::Linkage; -use rustc_codegen_ssa::CodegenResults; -use rustc::util::common::{time, time_ext}; -use rustc_fs_util::fix_windows_verbatim_for_gcc; -use rustc::hir::def_id::CrateNum; -use tempfile::{Builder as TempFileBuilder, TempDir}; -use rustc_target::spec::{PanicStrategy, RelroLevel, LinkerFlavor}; -use rustc_data_structures::fx::FxHashSet; - -use std::ascii; -use std::char; -use std::env; -use std::fmt; -use std::fs; -use std::io; -use std::iter; -use std::path::{Path, PathBuf}; -use std::process::{Output, Stdio}; -use std::str; -use syntax::attr; - -pub use rustc_codegen_utils::link::{find_crate_name, filename_for_input, default_output_for_target, - 
invalid_output_for_target, filename_for_metadata, - out_filename, check_file_is_writeable}; - - -/// Performs the linkage portion of the compilation phase. This will generate all -/// of the requested outputs for this compilation session. -pub(crate) fn link_binary(sess: &Session, - codegen_results: &CodegenResults, - outputs: &OutputFilenames, - crate_name: &str) -> Vec { - let mut out_filenames = Vec::new(); - for &crate_type in sess.crate_types.borrow().iter() { - // Ignore executable crates if we have -Z no-codegen, as they will error. - let output_metadata = sess.opts.output_types.contains_key(&OutputType::Metadata); - if (sess.opts.debugging_opts.no_codegen || !sess.opts.output_types.should_codegen()) && - !output_metadata && - crate_type == config::CrateType::Executable { - continue; - } - - if invalid_output_for_target(sess, crate_type) { - bug!("invalid output type `{:?}` for target os `{}`", - crate_type, sess.opts.target_triple); - } - let out_files = link_binary_output(sess, - codegen_results, - crate_type, - outputs, - crate_name); - out_filenames.extend(out_files); - } - - // Remove the temporary object file and metadata if we aren't saving temps - if !sess.opts.cg.save_temps { - if sess.opts.output_types.should_codegen() && !preserve_objects_for_their_debuginfo(sess) { - for obj in codegen_results.modules.iter().filter_map(|m| m.object.as_ref()) { - remove(sess, obj); - } - } - for obj in codegen_results.modules.iter().filter_map(|m| m.bytecode_compressed.as_ref()) { - remove(sess, obj); - } - if let Some(ref obj) = codegen_results.metadata_module.object { - remove(sess, obj); - } - if let Some(ref allocator) = codegen_results.allocator_module { - if let Some(ref obj) = allocator.object { - remove(sess, obj); - } - if let Some(ref bc) = allocator.bytecode_compressed { - remove(sess, bc); - } - } - } - - out_filenames -} - -/// Returns a boolean indicating whether we should preserve the object files on -/// the filesystem for their debug information. This is often useful with -/// split-dwarf like schemes. -fn preserve_objects_for_their_debuginfo(sess: &Session) -> bool { - // If the objects don't have debuginfo there's nothing to preserve. - if sess.opts.debuginfo == DebugInfo::None { - return false - } - - // If we're only producing artifacts that are archives, no need to preserve - // the objects as they're losslessly contained inside the archives. - let output_linked = sess.crate_types.borrow() - .iter() - .any(|&x| x != config::CrateType::Rlib && x != config::CrateType::Staticlib); - if !output_linked { - return false - } - - // If we're on OSX then the equivalent of split dwarf is turned on by - // default. The final executable won't actually have any debug information - // except it'll have pointers to elsewhere. Historically we've always run - // `dsymutil` to "link all the dwarf together" but this is actually sort of - // a bummer for incremental compilation! (the whole point of split dwarf is - // that you don't do this sort of dwarf link). - // - // Basically as a result this just means that if we're on OSX and we're - // *not* running dsymutil then the object files are the only source of truth - // for debug information, so we must preserve them. 
- if sess.target.target.options.is_like_osx { - match sess.opts.debugging_opts.run_dsymutil { - // dsymutil is not being run, preserve objects - Some(false) => return true, - - // dsymutil is being run, no need to preserve the objects - Some(true) => return false, - - // The default historical behavior was to always run dsymutil, so - // we're preserving that temporarily, but we're likely to switch the - // default soon. - None => return false, - } - } - - false -} - -fn link_binary_output(sess: &Session, - codegen_results: &CodegenResults, - crate_type: config::CrateType, - outputs: &OutputFilenames, - crate_name: &str) -> Vec { - for obj in codegen_results.modules.iter().filter_map(|m| m.object.as_ref()) { - check_file_is_writeable(obj, sess); - } - - let mut out_filenames = vec![]; - - if outputs.outputs.contains_key(&OutputType::Metadata) { - let out_filename = filename_for_metadata(sess, crate_name, outputs); - // To avoid races with another rustc process scanning the output directory, - // we need to write the file somewhere else and atomically move it to its - // final destination, with a `fs::rename` call. In order for the rename to - // always succeed, the temporary file needs to be on the same filesystem, - // which is why we create it inside the output directory specifically. - let metadata_tmpdir = TempFileBuilder::new() - .prefix("rmeta") - .tempdir_in(out_filename.parent().unwrap()) - .unwrap_or_else(|err| sess.fatal(&format!("couldn't create a temp dir: {}", err))); - let metadata = emit_metadata(sess, codegen_results, &metadata_tmpdir); - if let Err(e) = fs::rename(metadata, &out_filename) { - sess.fatal(&format!("failed to write {}: {}", out_filename.display(), e)); - } - out_filenames.push(out_filename); - } - - let tmpdir = TempFileBuilder::new().prefix("rustc").tempdir().unwrap_or_else(|err| - sess.fatal(&format!("couldn't create a temp dir: {}", err))); - - if outputs.outputs.should_codegen() { - let out_filename = out_filename(sess, crate_type, outputs, crate_name); - match crate_type { - config::CrateType::Rlib => { - link_rlib(sess, - codegen_results, - RlibFlavor::Normal, - &out_filename, - &tmpdir).build(); - } - config::CrateType::Staticlib => { - link_staticlib(sess, codegen_results, &out_filename, &tmpdir); - } - _ => { - link_natively(sess, crate_type, &out_filename, codegen_results, tmpdir.path()); - } - } - out_filenames.push(out_filename); - } - - if sess.opts.cg.save_temps { - let _ = tmpdir.into_path(); - } - - out_filenames -} - -fn archive_search_paths(sess: &Session) -> Vec { - sess.target_filesearch(PathKind::Native).search_path_dirs() -} - -fn archive_config<'a>(sess: &'a Session, - output: &Path, - input: Option<&Path>) -> ArchiveConfig<'a> { - ArchiveConfig { - sess, - dst: output.to_path_buf(), - src: input.map(|p| p.to_path_buf()), - lib_search_paths: archive_search_paths(sess), - } -} - -/// We use a temp directory here to avoid races between concurrent rustc processes, -/// such as builds in the same directory using the same filename for metadata while -/// building an `.rlib` (stomping over one another), or writing an `.rmeta` into a -/// directory being searched for `extern crate` (observing an incomplete file). -/// The returned path is the temporary file containing the complete metadata. 
-fn emit_metadata<'a>( - sess: &'a Session, - codegen_results: &CodegenResults, - tmpdir: &TempDir -) -> PathBuf { - let out_filename = tmpdir.path().join(METADATA_FILENAME); - let result = fs::write(&out_filename, &codegen_results.metadata.raw_data); - - if let Err(e) = result { - sess.fatal(&format!("failed to write {}: {}", out_filename.display(), e)); - } - - out_filename -} - -enum RlibFlavor { - Normal, - StaticlibBase, -} - -// Create an 'rlib' -// -// An rlib in its current incarnation is essentially a renamed .a file. The -// rlib primarily contains the object file of the crate, but it also contains -// all of the object files from native libraries. This is done by unzipping -// native libraries and inserting all of the contents into this archive. -fn link_rlib<'a>(sess: &'a Session, - codegen_results: &CodegenResults, - flavor: RlibFlavor, - out_filename: &Path, - tmpdir: &TempDir) -> ArchiveBuilder<'a> { - info!("preparing rlib to {:?}", out_filename); - let mut ab = ArchiveBuilder::new(archive_config(sess, out_filename, None)); - - for obj in codegen_results.modules.iter().filter_map(|m| m.object.as_ref()) { - ab.add_file(obj); - } - - // Note that in this loop we are ignoring the value of `lib.cfg`. That is, - // we may not be configured to actually include a static library if we're - // adding it here. That's because later when we consume this rlib we'll - // decide whether we actually needed the static library or not. - // - // To do this "correctly" we'd need to keep track of which libraries added - // which object files to the archive. We don't do that here, however. The - // #[link(cfg(..))] feature is unstable, though, and only intended to get - // liblibc working. In that sense the check below just indicates that if - // there are any libraries we want to omit object files for at link time we - // just exclude all custom object files. - // - // Eventually if we want to stabilize or flesh out the #[link(cfg(..))] - // feature then we'll need to figure out how to record what objects were - // loaded from the libraries found here and then encode that into the - // metadata of the rlib we're generating somehow. - for lib in codegen_results.crate_info.used_libraries.iter() { - match lib.kind { - NativeLibraryKind::NativeStatic => {} - NativeLibraryKind::NativeStaticNobundle | - NativeLibraryKind::NativeFramework | - NativeLibraryKind::NativeUnknown => continue, - } - if let Some(name) = lib.name { - ab.add_native_library(&name.as_str()); - } - } - - // After adding all files to the archive, we need to update the - // symbol table of the archive. - ab.update_symbols(); - - // Note that it is important that we add all of our non-object "magical - // files" *after* all of the object files in the archive. The reason for - // this is as follows: - // - // * When performing LTO, this archive will be modified to remove - // objects from above. The reason for this is described below. - // - // * When the system linker looks at an archive, it will attempt to - // determine the architecture of the archive in order to see whether its - // linkable. - // - // The algorithm for this detection is: iterate over the files in the - // archive. Skip magical SYMDEF names. Interpret the first file as an - // object file. Read architecture from the object file. - // - // * As one can probably see, if "metadata" and "foo.bc" were placed - // before all of the objects, then the architecture of this archive would - // not be correctly inferred once 'foo.o' is removed. 
- // - // Basically, all this means is that this code should not move above the - // code above. - match flavor { - RlibFlavor::Normal => { - // Instead of putting the metadata in an object file section, rlibs - // contain the metadata in a separate file. - ab.add_file(&emit_metadata(sess, codegen_results, tmpdir)); - - // For LTO purposes, the bytecode of this library is also inserted - // into the archive. - for bytecode in codegen_results - .modules - .iter() - .filter_map(|m| m.bytecode_compressed.as_ref()) - { - ab.add_file(bytecode); - } - - // After adding all files to the archive, we need to update the - // symbol table of the archive. This currently dies on macOS (see - // #11162), and isn't necessary there anyway - if !sess.target.target.options.is_like_osx { - ab.update_symbols(); - } - } - - RlibFlavor::StaticlibBase => { - let obj = codegen_results.allocator_module - .as_ref() - .and_then(|m| m.object.as_ref()); - if let Some(obj) = obj { - ab.add_file(obj); - } - } - } - - ab -} - -// Create a static archive -// -// This is essentially the same thing as an rlib, but it also involves adding -// all of the upstream crates' objects into the archive. This will slurp in -// all of the native libraries of upstream dependencies as well. -// -// Additionally, there's no way for us to link dynamic libraries, so we warn -// about all dynamic library dependencies that they're not linked in. -// -// There's no need to include metadata in a static archive, so ensure to not -// link in the metadata object file (and also don't prepare the archive with a -// metadata file). -fn link_staticlib(sess: &Session, - codegen_results: &CodegenResults, - out_filename: &Path, - tempdir: &TempDir) { - let mut ab = link_rlib(sess, - codegen_results, - RlibFlavor::StaticlibBase, - out_filename, - tempdir); - let mut all_native_libs = vec![]; - - let res = each_linked_rlib(sess, &codegen_results.crate_info, &mut |cnum, path| { - let name = &codegen_results.crate_info.crate_name[&cnum]; - let native_libs = &codegen_results.crate_info.native_libraries[&cnum]; - - // Here when we include the rlib into our staticlib we need to make a - // decision whether to include the extra object files along the way. - // These extra object files come from statically included native - // libraries, but they may be cfg'd away with #[link(cfg(..))]. - // - // This unstable feature, though, only needs liblibc to work. The only - // use case there is where musl is statically included in liblibc.rlib, - // so if we don't want the included version we just need to skip it. As - // a result the logic here is that if *any* linked library is cfg'd away - // we just skip all object files. - // - // Clearly this is not sufficient for a general purpose feature, and - // we'd want to read from the library's metadata to determine which - // object files come from where and selectively skip them. 
- let skip_object_files = native_libs.iter().any(|lib| { - lib.kind == NativeLibraryKind::NativeStatic && !relevant_lib(sess, lib) - }); - ab.add_rlib(path, - &name.as_str(), - are_upstream_rust_objects_already_included(sess) && - !ignored_for_lto(sess, &codegen_results.crate_info, cnum), - skip_object_files).unwrap(); - - all_native_libs.extend(codegen_results.crate_info.native_libraries[&cnum].iter().cloned()); - }); - if let Err(e) = res { - sess.fatal(&e); - } - - ab.update_symbols(); - ab.build(); - - if !all_native_libs.is_empty() { - if sess.opts.prints.contains(&PrintRequest::NativeStaticLibs) { - print_native_static_libs(sess, &all_native_libs); - } - } -} - -fn print_native_static_libs(sess: &Session, all_native_libs: &[NativeLibrary]) { - let lib_args: Vec<_> = all_native_libs.iter() - .filter(|l| relevant_lib(sess, l)) - .filter_map(|lib| { - let name = lib.name?; - match lib.kind { - NativeLibraryKind::NativeStaticNobundle | - NativeLibraryKind::NativeUnknown => { - if sess.target.target.options.is_like_msvc { - Some(format!("{}.lib", name)) - } else { - Some(format!("-l{}", name)) - } - }, - NativeLibraryKind::NativeFramework => { - // ld-only syntax, since there are no frameworks in MSVC - Some(format!("-framework {}", name)) - }, - // These are included, no need to print them - NativeLibraryKind::NativeStatic => None, - } - }) - .collect(); - if !lib_args.is_empty() { - sess.note_without_error("Link against the following native artifacts when linking \ - against this static library. The order and any duplication \ - can be significant on some platforms."); - // Prefix for greppability - sess.note_without_error(&format!("native-static-libs: {}", &lib_args.join(" "))); - } -} - -fn get_file_path(sess: &Session, name: &str) -> PathBuf { - let fs = sess.target_filesearch(PathKind::Native); - let file_path = fs.get_lib_path().join(name); - if file_path.exists() { - return file_path - } - for search_path in fs.search_paths() { - let file_path = search_path.dir.join(name); - if file_path.exists() { - return file_path - } - } - PathBuf::from(name) -} - -// Create a dynamic library or executable -// -// This will invoke the system linker/cc to create the resulting file. This -// links to all upstream files as well. 
-fn link_natively(sess: &Session, - crate_type: config::CrateType, - out_filename: &Path, - codegen_results: &CodegenResults, - tmpdir: &Path) { - info!("preparing {:?} to {:?}", crate_type, out_filename); - let (linker, flavor) = linker_and_flavor(sess); - - // The invocations of cc share some flags across platforms - let (pname, mut cmd) = get_linker(sess, &linker, flavor); - - if let Some(args) = sess.target.target.options.pre_link_args.get(&flavor) { - cmd.args(args); - } - if let Some(args) = sess.target.target.options.pre_link_args_crt.get(&flavor) { - if sess.crt_static() { - cmd.args(args); - } - } - if let Some(ref args) = sess.opts.debugging_opts.pre_link_args { - cmd.args(args); - } - cmd.args(&sess.opts.debugging_opts.pre_link_arg); - - if sess.target.target.options.is_like_fuchsia { - let prefix = match sess.opts.debugging_opts.sanitizer { - Some(Sanitizer::Address) => "asan/", - _ => "", - }; - cmd.arg(format!("--dynamic-linker={}ld.so.1", prefix)); - } - - let pre_link_objects = if crate_type == config::CrateType::Executable { - &sess.target.target.options.pre_link_objects_exe - } else { - &sess.target.target.options.pre_link_objects_dll - }; - for obj in pre_link_objects { - cmd.arg(get_file_path(sess, obj)); - } - - if crate_type == config::CrateType::Executable && sess.crt_static() { - for obj in &sess.target.target.options.pre_link_objects_exe_crt { - cmd.arg(get_file_path(sess, obj)); - } - } - - if sess.target.target.options.is_like_emscripten { - cmd.arg("-s"); - cmd.arg(if sess.panic_strategy() == PanicStrategy::Abort { - "DISABLE_EXCEPTION_CATCHING=1" - } else { - "DISABLE_EXCEPTION_CATCHING=0" - }); - } - - { - let target_cpu = crate::llvm_util::target_cpu(sess); - let mut linker = codegen_results.linker_info.to_linker(cmd, &sess, flavor, target_cpu); - link_args(&mut *linker, flavor, sess, crate_type, tmpdir, - out_filename, codegen_results); - cmd = linker.finalize(); - } - if let Some(args) = sess.target.target.options.late_link_args.get(&flavor) { - cmd.args(args); - } - for obj in &sess.target.target.options.post_link_objects { - cmd.arg(get_file_path(sess, obj)); - } - if sess.crt_static() { - for obj in &sess.target.target.options.post_link_objects_crt { - cmd.arg(get_file_path(sess, obj)); - } - } - if let Some(args) = sess.target.target.options.post_link_args.get(&flavor) { - cmd.args(args); - } - for &(ref k, ref v) in &sess.target.target.options.link_env { - cmd.env(k, v); - } - - if sess.opts.debugging_opts.print_link_args { - println!("{:?}", &cmd); - } - - // May have not found libraries in the right formats. - sess.abort_if_errors(); - - // Invoke the system linker - // - // Note that there's a terribly awful hack that really shouldn't be present - // in any compiler. Here an environment variable is supported to - // automatically retry the linker invocation if the linker looks like it - // segfaulted. - // - // Gee that seems odd, normally segfaults are things we want to know about! - // Unfortunately though in rust-lang/rust#38878 we're experiencing the - // linker segfaulting on Travis quite a bit which is causing quite a bit of - // pain to land PRs when they spuriously fail due to a segfault. - // - // The issue #38878 has some more debugging information on it as well, but - // this unfortunately looks like it's just a race condition in macOS's linker - // with some thread pool working in the background. It seems that no one - // currently knows a fix for this so in the meantime we're left with this... 
- info!("{:?}", &cmd); - let retry_on_segfault = env::var("RUSTC_RETRY_LINKER_ON_SEGFAULT").is_ok(); - let mut prog; - let mut i = 0; - loop { - i += 1; - prog = time(sess, "running linker", || { - exec_linker(sess, &mut cmd, out_filename, tmpdir) - }); - let output = match prog { - Ok(ref output) => output, - Err(_) => break, - }; - if output.status.success() { - break - } - let mut out = output.stderr.clone(); - out.extend(&output.stdout); - let out = String::from_utf8_lossy(&out); - - // Check to see if the link failed with "unrecognized command line option: - // '-no-pie'" for gcc or "unknown argument: '-no-pie'" for clang. If so, - // reperform the link step without the -no-pie option. This is safe because - // if the linker doesn't support -no-pie then it should not default to - // linking executables as pie. Different versions of gcc seem to use - // different quotes in the error message so don't check for them. - if sess.target.target.options.linker_is_gnu && - flavor != LinkerFlavor::Ld && - (out.contains("unrecognized command line option") || - out.contains("unknown argument")) && - out.contains("-no-pie") && - cmd.get_args().iter().any(|e| e.to_string_lossy() == "-no-pie") { - info!("linker output: {:?}", out); - warn!("Linker does not support -no-pie command line option. Retrying without."); - for arg in cmd.take_args() { - if arg.to_string_lossy() != "-no-pie" { - cmd.arg(arg); - } - } - info!("{:?}", &cmd); - continue; - } - if !retry_on_segfault || i > 3 { - break - } - let msg_segv = "clang: error: unable to execute command: Segmentation fault: 11"; - let msg_bus = "clang: error: unable to execute command: Bus error: 10"; - if !(out.contains(msg_segv) || out.contains(msg_bus)) { - break - } - - warn!( - "looks like the linker segfaulted when we tried to call it, \ - automatically retrying again. cmd = {:?}, out = {}.", - cmd, - out, - ); - } - - match prog { - Ok(prog) => { - fn escape_string(s: &[u8]) -> String { - str::from_utf8(s).map(|s| s.to_owned()) - .unwrap_or_else(|_| { - let mut x = "Non-UTF-8 output: ".to_string(); - x.extend(s.iter() - .flat_map(|&b| ascii::escape_default(b)) - .map(char::from)); - x - }) - } - if !prog.status.success() { - let mut output = prog.stderr.clone(); - output.extend_from_slice(&prog.stdout); - sess.struct_err(&format!("linking with `{}` failed: {}", - pname.display(), - prog.status)) - .note(&format!("{:?}", &cmd)) - .note(&escape_string(&output)) - .emit(); - sess.abort_if_errors(); - } - info!("linker stderr:\n{}", escape_string(&prog.stderr)); - info!("linker stdout:\n{}", escape_string(&prog.stdout)); - }, - Err(e) => { - let linker_not_found = e.kind() == io::ErrorKind::NotFound; - - let mut linker_error = { - if linker_not_found { - sess.struct_err(&format!("linker `{}` not found", pname.display())) - } else { - sess.struct_err(&format!("could not exec the linker `{}`", pname.display())) - } - }; - - linker_error.note(&e.to_string()); - - if !linker_not_found { - linker_error.note(&format!("{:?}", &cmd)); - } - - linker_error.emit(); - - if sess.target.target.options.is_like_msvc && linker_not_found { - sess.note_without_error("the msvc targets depend on the msvc linker \ - but `link.exe` was not found"); - sess.note_without_error("please ensure that VS 2013, VS 2015 or VS 2017 \ - was installed with the Visual C++ option"); - } - sess.abort_if_errors(); - } - } - - - // On macOS, debuggers need this utility to get run to do some munging of - // the symbols. 
Note, though, that if the object files are being preserved - // for their debug information there's no need for us to run dsymutil. - if sess.target.target.options.is_like_osx && - sess.opts.debuginfo != DebugInfo::None && - !preserve_objects_for_their_debuginfo(sess) - { - if let Err(e) = Command::new("dsymutil").arg(out_filename).output() { - sess.fatal(&format!("failed to run dsymutil: {}", e)) - } - } - - if sess.opts.target_triple.triple() == "wasm32-unknown-unknown" { - wasm::add_producer_section( - &out_filename, - &sess.edition().to_string(), - option_env!("CFG_VERSION").unwrap_or("unknown"), - ); - } -} - -fn exec_linker(sess: &Session, cmd: &mut Command, out_filename: &Path, tmpdir: &Path) - -> io::Result -{ - // When attempting to spawn the linker we run a risk of blowing out the - // size limits for spawning a new process with respect to the arguments - // we pass on the command line. - // - // Here we attempt to handle errors from the OS saying "your list of - // arguments is too big" by reinvoking the linker again with an `@`-file - // that contains all the arguments. The theory is that this is then - // accepted on all linkers and the linker will read all its options out of - // there instead of looking at the command line. - if !cmd.very_likely_to_exceed_some_spawn_limit() { - match cmd.command().stdout(Stdio::piped()).stderr(Stdio::piped()).spawn() { - Ok(child) => { - let output = child.wait_with_output(); - flush_linked_file(&output, out_filename)?; - return output; - } - Err(ref e) if command_line_too_big(e) => { - info!("command line to linker was too big: {}", e); - } - Err(e) => return Err(e) - } - } - - info!("falling back to passing arguments to linker via an @-file"); - let mut cmd2 = cmd.clone(); - let mut args = String::new(); - for arg in cmd2.take_args() { - args.push_str(&Escape { - arg: arg.to_str().unwrap(), - is_like_msvc: sess.target.target.options.is_like_msvc, - }.to_string()); - args.push_str("\n"); - } - let file = tmpdir.join("linker-arguments"); - let bytes = if sess.target.target.options.is_like_msvc { - let mut out = Vec::with_capacity((1 + args.len()) * 2); - // start the stream with a UTF-16 BOM - for c in iter::once(0xFEFF).chain(args.encode_utf16()) { - // encode in little endian - out.push(c as u8); - out.push((c >> 8) as u8); - } - out - } else { - args.into_bytes() - }; - fs::write(&file, &bytes)?; - cmd2.arg(format!("@{}", file.display())); - info!("invoking linker {:?}", cmd2); - let output = cmd2.output(); - flush_linked_file(&output, out_filename)?; - return output; - - #[cfg(unix)] - fn flush_linked_file(_: &io::Result, _: &Path) -> io::Result<()> { - Ok(()) - } - - #[cfg(windows)] - fn flush_linked_file(command_output: &io::Result, out_filename: &Path) - -> io::Result<()> - { - // On Windows, under high I/O load, output buffers are sometimes not flushed, - // even long after process exit, causing nasty, non-reproducible output bugs. - // - // File::sync_all() calls FlushFileBuffers() down the line, which solves the problem. 
- // - // А full writeup of the original Chrome bug can be found at - // randomascii.wordpress.com/2018/02/25/compiler-bug-linker-bug-windows-kernel-bug/amp - - if let &Ok(ref out) = command_output { - if out.status.success() { - if let Ok(of) = fs::OpenOptions::new().write(true).open(out_filename) { - of.sync_all()?; - } - } - } - - Ok(()) - } - - #[cfg(unix)] - fn command_line_too_big(err: &io::Error) -> bool { - err.raw_os_error() == Some(::libc::E2BIG) - } - - #[cfg(windows)] - fn command_line_too_big(err: &io::Error) -> bool { - const ERROR_FILENAME_EXCED_RANGE: i32 = 206; - err.raw_os_error() == Some(ERROR_FILENAME_EXCED_RANGE) - } - - struct Escape<'a> { - arg: &'a str, - is_like_msvc: bool, - } - - impl<'a> fmt::Display for Escape<'a> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if self.is_like_msvc { - // This is "documented" at - // https://msdn.microsoft.com/en-us/library/4xdcbak7.aspx - // - // Unfortunately there's not a great specification of the - // syntax I could find online (at least) but some local - // testing showed that this seemed sufficient-ish to catch - // at least a few edge cases. - write!(f, "\"")?; - for c in self.arg.chars() { - match c { - '"' => write!(f, "\\{}", c)?, - c => write!(f, "{}", c)?, - } - } - write!(f, "\"")?; - } else { - // This is documented at https://linux.die.net/man/1/ld, namely: - // - // > Options in file are separated by whitespace. A whitespace - // > character may be included in an option by surrounding the - // > entire option in either single or double quotes. Any - // > character (including a backslash) may be included by - // > prefixing the character to be included with a backslash. - // - // We put an argument on each line, so all we need to do is - // ensure the line is interpreted as one whole argument. - for c in self.arg.chars() { - match c { - '\\' | ' ' => write!(f, "\\{}", c)?, - c => write!(f, "{}", c)?, - } - } - } - Ok(()) - } - } -} - -fn link_args(cmd: &mut dyn Linker, - flavor: LinkerFlavor, - sess: &Session, - crate_type: config::CrateType, - tmpdir: &Path, - out_filename: &Path, - codegen_results: &CodegenResults) { - - // Linker plugins should be specified early in the list of arguments - cmd.linker_plugin_lto(); - - // The default library location, we need this to find the runtime. - // The location of crates will be determined as needed. - let lib_path = sess.target_filesearch(PathKind::All).get_lib_path(); - - // target descriptor - let t = &sess.target.target; - - cmd.include_path(&fix_windows_verbatim_for_gcc(&lib_path)); - for obj in codegen_results.modules.iter().filter_map(|m| m.object.as_ref()) { - cmd.add_object(obj); - } - cmd.output_filename(out_filename); - - if crate_type == config::CrateType::Executable && - sess.target.target.options.is_like_windows { - if let Some(ref s) = codegen_results.windows_subsystem { - cmd.subsystem(s); - } - } - - // If we're building a dynamic library then some platforms need to make sure - // that all symbols are exported correctly from the dynamic library. - if crate_type != config::CrateType::Executable || - sess.target.target.options.is_like_emscripten { - cmd.export_symbols(tmpdir, crate_type); - } - - // When linking a dynamic library, we put the metadata into a section of the - // executable. This metadata is in a separate object file from the main - // object file, so we link that in here. 
- if crate_type == config::CrateType::Dylib || - crate_type == config::CrateType::ProcMacro { - if let Some(obj) = codegen_results.metadata_module.object.as_ref() { - cmd.add_object(obj); - } - } - - let obj = codegen_results.allocator_module - .as_ref() - .and_then(|m| m.object.as_ref()); - if let Some(obj) = obj { - cmd.add_object(obj); - } - - // Try to strip as much out of the generated object by removing unused - // sections if possible. See more comments in linker.rs - if !sess.opts.cg.link_dead_code { - let keep_metadata = crate_type == config::CrateType::Dylib; - cmd.gc_sections(keep_metadata); - } - - let used_link_args = &codegen_results.crate_info.link_args; - - if crate_type == config::CrateType::Executable { - let mut position_independent_executable = false; - - if t.options.position_independent_executables { - let empty_vec = Vec::new(); - let args = sess.opts.cg.link_args.as_ref().unwrap_or(&empty_vec); - let more_args = &sess.opts.cg.link_arg; - let mut args = args.iter().chain(more_args.iter()).chain(used_link_args.iter()); - - if get_reloc_model(sess) == llvm::RelocMode::PIC - && !sess.crt_static() && !args.any(|x| *x == "-static") { - position_independent_executable = true; - } - } - - if position_independent_executable { - cmd.position_independent_executable(); - } else { - // recent versions of gcc can be configured to generate position - // independent executables by default. We have to pass -no-pie to - // explicitly turn that off. Not applicable to ld. - if sess.target.target.options.linker_is_gnu - && flavor != LinkerFlavor::Ld { - cmd.no_position_independent_executable(); - } - } - } - - let relro_level = match sess.opts.debugging_opts.relro_level { - Some(level) => level, - None => t.options.relro_level, - }; - match relro_level { - RelroLevel::Full => { - cmd.full_relro(); - }, - RelroLevel::Partial => { - cmd.partial_relro(); - }, - RelroLevel::Off => { - cmd.no_relro(); - }, - RelroLevel::None => { - }, - } - - // Pass optimization flags down to the linker. - cmd.optimize(); - - // Pass debuginfo flags down to the linker. - cmd.debuginfo(); - - // We want to, by default, prevent the compiler from accidentally leaking in - // any system libraries, so we may explicitly ask linkers to not link to any - // libraries by default. Note that this does not happen for windows because - // windows pulls in some large number of libraries and I couldn't quite - // figure out which subset we wanted. - // - // This is all naturally configurable via the standard methods as well. - if !sess.opts.cg.default_linker_libraries.unwrap_or(false) && - t.options.no_default_libraries - { - cmd.no_default_libraries(); - } - - // Take careful note of the ordering of the arguments we pass to the linker - // here. Linkers will assume that things on the left depend on things to the - // right. Things on the right cannot depend on things on the left. This is - // all formally implemented in terms of resolving symbols (libs on the right - // resolve unknown symbols of libs on the left, but not vice versa). - // - // For this reason, we have organized the arguments we pass to the linker as - // such: - // - // 1. The local object that LLVM just generated - // 2. Local native libraries - // 3. Upstream rust libraries - // 4. Upstream native libraries - // - // The rationale behind this ordering is that those items lower down in the - // list can't depend on items higher up in the list. For example nothing can - // depend on what we just generated (e.g., that'd be a circular dependency). 
- // Upstream rust libraries are not allowed to depend on our local native - // libraries as that would violate the structure of the DAG, in that - // scenario they are required to link to them as well in a shared fashion. - // - // Note that upstream rust libraries may contain native dependencies as - // well, but they also can't depend on what we just started to add to the - // link line. And finally upstream native libraries can't depend on anything - // in this DAG so far because they're only dylibs and dylibs can only depend - // on other dylibs (e.g., other native deps). - add_local_native_libraries(cmd, sess, codegen_results); - add_upstream_rust_crates(cmd, sess, codegen_results, crate_type, tmpdir); - add_upstream_native_libraries(cmd, sess, codegen_results, crate_type); - - // Tell the linker what we're doing. - if crate_type != config::CrateType::Executable { - cmd.build_dylib(out_filename); - } - if crate_type == config::CrateType::Executable && sess.crt_static() { - cmd.build_static_executable(); - } - - if sess.opts.debugging_opts.pgo_gen.is_some() { - cmd.pgo_gen(); - } - - // FIXME (#2397): At some point we want to rpath our guesses as to - // where extern libraries might live, based on the - // addl_lib_search_paths - if sess.opts.cg.rpath { - let target_triple = sess.opts.target_triple.triple(); - let mut get_install_prefix_lib_path = || { - let install_prefix = option_env!("CFG_PREFIX").expect("CFG_PREFIX"); - let tlib = filesearch::relative_target_lib_path(&sess.sysroot, target_triple); - let mut path = PathBuf::from(install_prefix); - path.push(&tlib); - - path - }; - let mut rpath_config = RPathConfig { - used_crates: &codegen_results.crate_info.used_crates_dynamic, - out_filename: out_filename.to_path_buf(), - has_rpath: sess.target.target.options.has_rpath, - is_like_osx: sess.target.target.options.is_like_osx, - linker_is_gnu: sess.target.target.options.linker_is_gnu, - get_install_prefix_lib_path: &mut get_install_prefix_lib_path, - }; - cmd.args(&rpath::get_rpath_flags(&mut rpath_config)); - } - - // Finally add all the linker arguments provided on the command line along - // with any #[link_args] attributes found inside the crate - if let Some(ref args) = sess.opts.cg.link_args { - cmd.args(args); - } - cmd.args(&sess.opts.cg.link_arg); - cmd.args(&used_link_args); -} - -// # Native library linking -// -// User-supplied library search paths (-L on the command line). These are -// the same paths used to find Rust crates, so some of them may have been -// added already by the previous crate linking code. This only allows them -// to be found at compile time so it is still entirely up to outside -// forces to make sure that library can be found at runtime. -// -// Also note that the native libraries linked here are only the ones located -// in the current crate. Upstream crates with native library dependencies -// may have their native library pulled in above. 
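// To make the left-to-right ordering rule described above concrete, here is a
// hypothetical argument list (crate and library names are made up). With a
// single-pass linker, an archive or library can only satisfy symbols that are
// still undefined when the linker reaches it, so every item must appear to the
// right of the code that uses it:
const EXAMPLE_LINK_ORDER: &[&str] = &[
    "crate.0.rcgu.o",        // 1. the objects LLVM just generated for this crate
    "-lmy_local_lib",        // 2. local native libraries
    "liba-deadbeef.rlib",    // 3. upstream Rust crates (topologically sorted)
    "-lz",                   // 4. upstream native libraries
];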
-fn add_local_native_libraries(cmd: &mut dyn Linker, - sess: &Session, - codegen_results: &CodegenResults) { - let filesearch = sess.target_filesearch(PathKind::All); - for search_path in filesearch.search_paths() { - match search_path.kind { - PathKind::Framework => { cmd.framework_path(&search_path.dir); } - _ => { cmd.include_path(&fix_windows_verbatim_for_gcc(&search_path.dir)); } - } - } - - let relevant_libs = codegen_results.crate_info.used_libraries.iter().filter(|l| { - relevant_lib(sess, l) - }); - - let search_path = archive_search_paths(sess); - for lib in relevant_libs { - let name = match lib.name { - Some(ref l) => l, - None => continue, - }; - match lib.kind { - NativeLibraryKind::NativeUnknown => cmd.link_dylib(&name.as_str()), - NativeLibraryKind::NativeFramework => cmd.link_framework(&name.as_str()), - NativeLibraryKind::NativeStaticNobundle => cmd.link_staticlib(&name.as_str()), - NativeLibraryKind::NativeStatic => cmd.link_whole_staticlib(&name.as_str(), - &search_path) - } - } -} - -// # Rust Crate linking -// -// Rust crates are not considered at all when creating an rlib output. All -// dependencies will be linked when producing the final output (instead of -// the intermediate rlib version) -fn add_upstream_rust_crates(cmd: &mut dyn Linker, - sess: &Session, - codegen_results: &CodegenResults, - crate_type: config::CrateType, - tmpdir: &Path) { - // All of the heavy lifting has previously been accomplished by the - // dependency_format module of the compiler. This is just crawling the - // output of that module, adding crates as necessary. - // - // Linking to a rlib involves just passing it to the linker (the linker - // will slurp up the object files inside), and linking to a dynamic library - // involves just passing the right -l flag. - - let formats = sess.dependency_formats.borrow(); - let data = formats.get(&crate_type).unwrap(); - - // Invoke get_used_crates to ensure that we get a topological sorting of - // crates. - let deps = &codegen_results.crate_info.used_crates_dynamic; - - // There's a few internal crates in the standard library (aka libcore and - // libstd) which actually have a circular dependence upon one another. This - // currently arises through "weak lang items" where libcore requires things - // like `rust_begin_unwind` but libstd ends up defining it. To get this - // circular dependence to work correctly in all situations we'll need to be - // sure to correctly apply the `--start-group` and `--end-group` options to - // GNU linkers, otherwise if we don't use any other symbol from the standard - // library it'll get discarded and the whole application won't link. - // - // In this loop we're calculating the `group_end`, after which crate to - // pass `--end-group` and `group_start`, before which crate to pass - // `--start-group`. We currently do this by passing `--end-group` after - // the first crate (when iterating backwards) that requires a lang item - // defined somewhere else. Once that's set then when we've defined all the - // necessary lang items we'll pass `--start-group`. - // - // Note that this isn't amazing logic for now but it should do the trick - // for the current implementation of the standard library. 
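// A minimal sketch (not part of this patch, names hypothetical) of what the
// grouping described above amounts to on a GNU-style link line: inside
// --start-group/--end-group the linker re-scans the archives until no new
// symbols are resolved, which is what lets the libcore <-> libstd weak lang
// item cycle link. This is roughly what the `group_start()`/`group_end()`
// calls below expand to for such linkers.
fn group_archives(archives: &[&str]) -> Vec<String> {
    // e.g. ["-Wl,--start-group", "libstd-xxxx.rlib", "libcore-xxxx.rlib", "-Wl,--end-group"]
    let mut args = vec!["-Wl,--start-group".to_string()];
    args.extend(archives.iter().map(|s| s.to_string()));
    args.push("-Wl,--end-group".to_string());
    args
}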
- let mut group_end = None; - let mut group_start = None; - let mut end_with = FxHashSet::default(); - let info = &codegen_results.crate_info; - for &(cnum, _) in deps.iter().rev() { - if let Some(missing) = info.missing_lang_items.get(&cnum) { - end_with.extend(missing.iter().cloned()); - if end_with.len() > 0 && group_end.is_none() { - group_end = Some(cnum); - } - } - end_with.retain(|item| info.lang_item_to_crate.get(item) != Some(&cnum)); - if end_with.len() == 0 && group_end.is_some() { - group_start = Some(cnum); - break - } - } - - // If we didn't end up filling in all lang items from upstream crates then - // we'll be filling it in with our crate. This probably means we're the - // standard library itself, so skip this for now. - if group_end.is_some() && group_start.is_none() { - group_end = None; - } - - let mut compiler_builtins = None; - - for &(cnum, _) in deps.iter() { - if group_start == Some(cnum) { - cmd.group_start(); - } - - // We may not pass all crates through to the linker. Some crates may - // appear statically in an existing dylib, meaning we'll pick up all the - // symbols from the dylib. - let src = &codegen_results.crate_info.used_crate_source[&cnum]; - match data[cnum.as_usize() - 1] { - _ if codegen_results.crate_info.profiler_runtime == Some(cnum) => { - add_static_crate(cmd, sess, codegen_results, tmpdir, crate_type, cnum); - } - _ if codegen_results.crate_info.sanitizer_runtime == Some(cnum) => { - link_sanitizer_runtime(cmd, sess, codegen_results, tmpdir, cnum); - } - // compiler-builtins are always placed last to ensure that they're - // linked correctly. - _ if codegen_results.crate_info.compiler_builtins == Some(cnum) => { - assert!(compiler_builtins.is_none()); - compiler_builtins = Some(cnum); - } - Linkage::NotLinked | - Linkage::IncludedFromDylib => {} - Linkage::Static => { - add_static_crate(cmd, sess, codegen_results, tmpdir, crate_type, cnum); - } - Linkage::Dynamic => { - add_dynamic_crate(cmd, sess, &src.dylib.as_ref().unwrap().0) - } - } - - if group_end == Some(cnum) { - cmd.group_end(); - } - } - - // compiler-builtins are always placed last to ensure that they're - // linked correctly. - // We must always link the `compiler_builtins` crate statically. Even if it - // was already "included" in a dylib (e.g., `libstd` when `-C prefer-dynamic` - // is used) - if let Some(cnum) = compiler_builtins { - add_static_crate(cmd, sess, codegen_results, tmpdir, crate_type, cnum); - } - - // Converts a library file-stem into a cc -l argument - fn unlib<'a>(config: &config::Config, stem: &'a str) -> &'a str { - if stem.starts_with("lib") && !config.target.options.is_like_windows { - &stem[3..] - } else { - stem - } - } - - // We must link the sanitizer runtime using -Wl,--whole-archive but since - // it's packed in a .rlib, it contains stuff that are not objects that will - // make the linker error. So we must remove those bits from the .rlib before - // linking it. - fn link_sanitizer_runtime(cmd: &mut dyn Linker, - sess: &Session, - codegen_results: &CodegenResults, - tmpdir: &Path, - cnum: CrateNum) { - let src = &codegen_results.crate_info.used_crate_source[&cnum]; - let cratepath = &src.rlib.as_ref().unwrap().0; - - if sess.target.target.options.is_like_osx { - // On Apple platforms, the sanitizer is always built as a dylib, and - // LLVM will link to `@rpath/*.dylib`, so we need to specify an - // rpath to the library as well (the rpath should be absolute, see - // PR #41352 for details). 
- // - // FIXME: Remove this logic into librustc_*san once Cargo supports it - let rpath = cratepath.parent().unwrap(); - let rpath = rpath.to_str().expect("non-utf8 component in path"); - cmd.args(&["-Wl,-rpath".into(), "-Xlinker".into(), rpath.into()]); - } - - let dst = tmpdir.join(cratepath.file_name().unwrap()); - let cfg = archive_config(sess, &dst, Some(cratepath)); - let mut archive = ArchiveBuilder::new(cfg); - archive.update_symbols(); - - for f in archive.src_files() { - if f.ends_with(RLIB_BYTECODE_EXTENSION) || f == METADATA_FILENAME { - archive.remove_file(&f); - } - } - - archive.build(); - - cmd.link_whole_rlib(&dst); - } - - // Adds the static "rlib" versions of all crates to the command line. - // There's a bit of magic which happens here specifically related to LTO and - // dynamic libraries. Specifically: - // - // * For LTO, we remove upstream object files. - // * For dylibs we remove metadata and bytecode from upstream rlibs - // - // When performing LTO, almost(*) all of the bytecode from the upstream - // libraries has already been included in our object file output. As a - // result we need to remove the object files in the upstream libraries so - // the linker doesn't try to include them twice (or whine about duplicate - // symbols). We must continue to include the rest of the rlib, however, as - // it may contain static native libraries which must be linked in. - // - // (*) Crates marked with `#![no_builtins]` don't participate in LTO and - // their bytecode wasn't included. The object files in those libraries must - // still be passed to the linker. - // - // When making a dynamic library, linkers by default don't include any - // object files in an archive if they're not necessary to resolve the link. - // We basically want to convert the archive (rlib) to a dylib, though, so we - // *do* want everything included in the output, regardless of whether the - // linker thinks it's needed or not. As a result we must use the - // --whole-archive option (or the platform equivalent). When using this - // option the linker will fail if there are non-objects in the archive (such - // as our own metadata and/or bytecode). All in all, for rlibs to be - // entirely included in dylibs, we need to remove all non-object files. - // - // Note, however, that if we're not doing LTO or we're not producing a dylib - // (aka we're making an executable), we can just pass the rlib blindly to - // the linker (fast) because it's fine if it's not actually included as - // we're at the end of the dependency chain. - fn add_static_crate(cmd: &mut dyn Linker, - sess: &Session, - codegen_results: &CodegenResults, - tmpdir: &Path, - crate_type: config::CrateType, - cnum: CrateNum) { - let src = &codegen_results.crate_info.used_crate_source[&cnum]; - let cratepath = &src.rlib.as_ref().unwrap().0; - - // See the comment above in `link_staticlib` and `link_rlib` for why if - // there's a static library that's not relevant we skip all object - // files. 
- let native_libs = &codegen_results.crate_info.native_libraries[&cnum]; - let skip_native = native_libs.iter().any(|lib| { - lib.kind == NativeLibraryKind::NativeStatic && !relevant_lib(sess, lib) - }); - - if (!are_upstream_rust_objects_already_included(sess) || - ignored_for_lto(sess, &codegen_results.crate_info, cnum)) && - crate_type != config::CrateType::Dylib && - !skip_native { - cmd.link_rlib(&fix_windows_verbatim_for_gcc(cratepath)); - return - } - - let dst = tmpdir.join(cratepath.file_name().unwrap()); - let name = cratepath.file_name().unwrap().to_str().unwrap(); - let name = &name[3..name.len() - 5]; // chop off lib/.rlib - - time_ext(sess.time_extended(), Some(sess), &format!("altering {}.rlib", name), || { - let cfg = archive_config(sess, &dst, Some(cratepath)); - let mut archive = ArchiveBuilder::new(cfg); - archive.update_symbols(); - - let mut any_objects = false; - for f in archive.src_files() { - if f.ends_with(RLIB_BYTECODE_EXTENSION) || f == METADATA_FILENAME { - archive.remove_file(&f); - continue - } - - let canonical = f.replace("-", "_"); - let canonical_name = name.replace("-", "_"); - - // Look for `.rcgu.o` at the end of the filename to conclude - // that this is a Rust-related object file. - fn looks_like_rust(s: &str) -> bool { - let path = Path::new(s); - let ext = path.extension().and_then(|s| s.to_str()); - if ext != Some(OutputType::Object.extension()) { - return false - } - let ext2 = path.file_stem() - .and_then(|s| Path::new(s).extension()) - .and_then(|s| s.to_str()); - ext2 == Some(RUST_CGU_EXT) - } - - let is_rust_object = - canonical.starts_with(&canonical_name) && - looks_like_rust(&f); - - // If we've been requested to skip all native object files - // (those not generated by the rust compiler) then we can skip - // this file. See above for why we may want to do this. - let skip_because_cfg_say_so = skip_native && !is_rust_object; - - // If we're performing LTO and this is a rust-generated object - // file, then we don't need the object file as it's part of the - // LTO module. Note that `#![no_builtins]` is excluded from LTO, - // though, so we let that object file slide. - let skip_because_lto = are_upstream_rust_objects_already_included(sess) && - is_rust_object && - (sess.target.target.options.no_builtins || - !codegen_results.crate_info.is_no_builtins.contains(&cnum)); - - if skip_because_cfg_say_so || skip_because_lto { - archive.remove_file(&f); - } else { - any_objects = true; - } - } - - if !any_objects { - return - } - archive.build(); - - // If we're creating a dylib, then we need to include the - // whole of each object in our archive into that artifact. This is - // because a `dylib` can be reused as an intermediate artifact. - // - // Note, though, that we don't want to include the whole of a - // compiler-builtins crate (e.g., compiler-rt) because it'll get - // repeatedly linked anyway. - if crate_type == config::CrateType::Dylib && - codegen_results.crate_info.compiler_builtins != Some(cnum) { - cmd.link_whole_rlib(&fix_windows_verbatim_for_gcc(&dst)); - } else { - cmd.link_rlib(&fix_windows_verbatim_for_gcc(&dst)); - } - }); - } - - // Same thing as above, but for dynamic crates instead of static crates. 
- fn add_dynamic_crate(cmd: &mut dyn Linker, sess: &Session, cratepath: &Path) { - // Just need to tell the linker about where the library lives and - // what its name is - let parent = cratepath.parent(); - if let Some(dir) = parent { - cmd.include_path(&fix_windows_verbatim_for_gcc(dir)); - } - let filestem = cratepath.file_stem().unwrap().to_str().unwrap(); - cmd.link_rust_dylib(&unlib(&sess.target, filestem), - parent.unwrap_or(Path::new(""))); - } -} - -// Link in all of our upstream crates' native dependencies. Remember that -// all of these upstream native dependencies are all non-static -// dependencies. We've got two cases then: -// -// 1. The upstream crate is an rlib. In this case we *must* link in the -// native dependency because the rlib is just an archive. -// -// 2. The upstream crate is a dylib. In order to use the dylib, we have to -// have the dependency present on the system somewhere. Thus, we don't -// gain a whole lot from not linking in the dynamic dependency to this -// crate as well. -// -// The use case for this is a little subtle. In theory the native -// dependencies of a crate are purely an implementation detail of the crate -// itself, but the problem arises with generic and inlined functions. If a -// generic function calls a native function, then the generic function must -// be instantiated in the target crate, meaning that the native symbol must -// also be resolved in the target crate. -fn add_upstream_native_libraries(cmd: &mut dyn Linker, - sess: &Session, - codegen_results: &CodegenResults, - crate_type: config::CrateType) { - // Be sure to use a topological sorting of crates because there may be - // interdependencies between native libraries. When passing -nodefaultlibs, - // for example, almost all native libraries depend on libc, so we have to - // make sure that's all the way at the right (liblibc is near the base of - // the dependency chain). - // - // This passes RequireStatic, but the actual requirement doesn't matter, - // we're just getting an ordering of crate numbers, we're not worried about - // the paths. - let formats = sess.dependency_formats.borrow(); - let data = formats.get(&crate_type).unwrap(); - - let crates = &codegen_results.crate_info.used_crates_static; - for &(cnum, _) in crates { - for lib in codegen_results.crate_info.native_libraries[&cnum].iter() { - let name = match lib.name { - Some(ref l) => l, - None => continue, - }; - if !relevant_lib(sess, &lib) { - continue - } - match lib.kind { - NativeLibraryKind::NativeUnknown => cmd.link_dylib(&name.as_str()), - NativeLibraryKind::NativeFramework => cmd.link_framework(&name.as_str()), - NativeLibraryKind::NativeStaticNobundle => { - // Link "static-nobundle" native libs only if the crate they originate from - // is being linked statically to the current crate. If it's linked dynamically - // or is an rlib already included via some other dylib crate, the symbols from - // native libs will have already been included in that dylib. 
- if data[cnum.as_usize() - 1] == Linkage::Static { - cmd.link_staticlib(&name.as_str()) - } - }, - // ignore statically included native libraries here as we've - // already included them when we included the rust library - // previously - NativeLibraryKind::NativeStatic => {} - } - } - } -} - -fn relevant_lib(sess: &Session, lib: &NativeLibrary) -> bool { - match lib.cfg { - Some(ref cfg) => attr::cfg_matches(cfg, &sess.parse_sess, None), - None => true, - } -} - -fn are_upstream_rust_objects_already_included(sess: &Session) -> bool { - match sess.lto() { - Lto::Fat => true, - Lto::Thin => { - // If we defer LTO to the linker, we haven't run LTO ourselves, so - // any upstream object files have not been copied yet. - !sess.opts.cg.linker_plugin_lto.enabled() - } - Lto::No | - Lto::ThinLocal => false, - } -} diff --git a/src/librustc_codegen_llvm/back/lto.rs b/src/librustc_codegen_llvm/back/lto.rs index 84c652ff238af..af6c1f3bf2a71 100644 --- a/src/librustc_codegen_llvm/back/lto.rs +++ b/src/librustc_codegen_llvm/back/lto.rs @@ -1,4 +1,4 @@ -use crate::back::bytecode::{DecodedBytecode, RLIB_BYTECODE_EXTENSION}; +use crate::back::bytecode::DecodedBytecode; use crate::back::write::{self, DiagnosticHandlers, with_llvm_pmb, save_temp_bitcode, to_llvm_opt_settings}; use crate::llvm::archive_ro::ArchiveRO; @@ -17,7 +17,7 @@ use rustc::session::config::{self, Lto}; use rustc::util::common::time_ext; use rustc::util::profiling::ProfileCategory; use rustc_data_structures::fx::FxHashMap; -use rustc_codegen_ssa::{ModuleCodegen, ModuleKind}; +use rustc_codegen_ssa::{RLIB_BYTECODE_EXTENSION, ModuleCodegen, ModuleKind}; use std::ffi::{CStr, CString}; use std::ptr; diff --git a/src/librustc_codegen_llvm/back/write.rs b/src/librustc_codegen_llvm/back/write.rs index f0ed201ad5c27..0c15c0af09efb 100644 --- a/src/librustc_codegen_llvm/back/write.rs +++ b/src/librustc_codegen_llvm/back/write.rs @@ -1,5 +1,5 @@ use crate::attributes; -use crate::back::bytecode::{self, RLIB_BYTECODE_EXTENSION}; +use crate::back::bytecode; use crate::back::lto::ThinBuffer; use crate::base; use crate::consts; @@ -16,7 +16,7 @@ use rustc_codegen_ssa::traits::*; use rustc::session::config::{self, OutputType, Passes, Lto}; use rustc::session::Session; use rustc::ty::TyCtxt; -use rustc_codegen_ssa::{ModuleCodegen, CompiledModule}; +use rustc_codegen_ssa::{RLIB_BYTECODE_EXTENSION, ModuleCodegen, CompiledModule}; use rustc::util::common::time_ext; use rustc::util::profiling::ProfileCategory; use rustc_fs_util::{path_to_c_string, link_or_copy}; diff --git a/src/librustc_codegen_llvm/debuginfo/metadata.rs b/src/librustc_codegen_llvm/debuginfo/metadata.rs index 94d520ec78c71..31348b99c5af0 100644 --- a/src/librustc_codegen_llvm/debuginfo/metadata.rs +++ b/src/librustc_codegen_llvm/debuginfo/metadata.rs @@ -22,6 +22,7 @@ use rustc::hir::CodegenFnAttrFlags; use rustc::hir::def::CtorKind; use rustc::hir::def_id::{DefId, CrateNum, LOCAL_CRATE}; use rustc::ich::NodeIdHashingMode; +use rustc::mir::Field; use rustc::mir::interpret::truncate; use rustc_data_structures::fingerprint::Fingerprint; use rustc::ty::Instance; @@ -375,7 +376,7 @@ fn vec_slice_metadata( return_if_metadata_created_in_meantime!(cx, unique_type_id); - let slice_type_name = compute_debuginfo_type_name(cx, slice_ptr_type, true); + let slice_type_name = compute_debuginfo_type_name(cx.tcx, slice_ptr_type, true); let (pointer_size, pointer_align) = cx.size_and_align_of(data_ptr_type); let (usize_size, usize_align) = cx.size_and_align_of(cx.tcx.types.usize); @@ -478,7 +479,7 @@ fn 
trait_pointer_metadata( let trait_object_type = trait_object_type.unwrap_or(trait_type); let trait_type_name = - compute_debuginfo_type_name(cx, trait_object_type, false); + compute_debuginfo_type_name(cx.tcx, trait_object_type, false); let file_metadata = unknown_file_metadata(cx); @@ -865,7 +866,7 @@ fn foreign_type_metadata( ) -> &'ll DIType { debug!("foreign_type_metadata: {:?}", t); - let name = compute_debuginfo_type_name(cx, t, false); + let name = compute_debuginfo_type_name(cx.tcx, t, false); create_struct_stub(cx, t, &name, unique_type_id, NO_SCOPE_METADATA) } @@ -875,7 +876,7 @@ fn pointer_type_metadata( pointee_type_metadata: &'ll DIType, ) -> &'ll DIType { let (pointer_size, pointer_align) = cx.size_and_align_of(pointer_type); - let name = compute_debuginfo_type_name(cx, pointer_type, false); + let name = compute_debuginfo_type_name(cx.tcx, pointer_type, false); let name = SmallCStr::new(&name); unsafe { llvm::LLVMRustDIBuilderCreatePointerType( @@ -1071,7 +1072,7 @@ fn prepare_struct_metadata( unique_type_id: UniqueTypeId, span: Span, ) -> RecursiveTypeDescription<'ll, 'tcx> { - let struct_name = compute_debuginfo_type_name(cx, struct_type, false); + let struct_name = compute_debuginfo_type_name(cx.tcx, struct_type, false); let (struct_def_id, variant) = match struct_type.sty { ty::Adt(def, _) => (def.did, def.non_enum_variant()), @@ -1137,7 +1138,7 @@ fn prepare_tuple_metadata( unique_type_id: UniqueTypeId, span: Span, ) -> RecursiveTypeDescription<'ll, 'tcx> { - let tuple_name = compute_debuginfo_type_name(cx, tuple_type, false); + let tuple_name = compute_debuginfo_type_name(cx.tcx, tuple_type, false); let struct_stub = create_struct_stub(cx, tuple_type, @@ -1193,7 +1194,7 @@ fn prepare_union_metadata( unique_type_id: UniqueTypeId, span: Span, ) -> RecursiveTypeDescription<'ll, 'tcx> { - let union_name = compute_debuginfo_type_name(cx, union_type, false); + let union_name = compute_debuginfo_type_name(cx.tcx, union_type, false); let (union_def_id, variant) = match union_type.sty { ty::Adt(def, _) => (def.did, def.non_enum_variant()), @@ -1306,12 +1307,15 @@ impl EnumMemberDescriptionFactory<'ll, 'tcx> { } layout::Variants::Multiple { discr_kind: layout::DiscriminantKind::Tag, + discr_index, ref variants, .. } => { let discriminant_info = if fallback { - RegularDiscriminant(self.discriminant_type_metadata - .expect("")) + RegularDiscriminant { + discr_field: Field::from(discr_index), + discr_type_metadata: self.discriminant_type_metadata.unwrap() + } } else { // This doesn't matter in this case. NoDiscriminant @@ -1358,6 +1362,7 @@ impl EnumMemberDescriptionFactory<'ll, 'tcx> { }, ref discr, ref variants, + discr_index, } => { if fallback { let variant = self.layout.for_variant(cx, dataful_variant); @@ -1403,8 +1408,8 @@ impl EnumMemberDescriptionFactory<'ll, 'tcx> { } compute_field_path(cx, &mut name, self.layout, - self.layout.fields.offset(0), - self.layout.field(cx, 0).size); + self.layout.fields.offset(discr_index), + self.layout.field(cx, discr_index).size); name.push_str(&adt.variants[*niche_variants.start()].ident.as_str()); // Create the (singleton) list of descriptions of union members. @@ -1486,6 +1491,8 @@ impl VariantMemberDescriptionFactory<'ll, 'tcx> { name: name.to_string(), type_metadata: if use_enum_fallback(cx) { match self.discriminant_type_metadata { + // Discriminant is always the first field of our variant + // when using the enum fallback. 
Some(metadata) if i == 0 => metadata, _ => type_metadata(cx, ty, self.span) } @@ -1504,7 +1511,7 @@ impl VariantMemberDescriptionFactory<'ll, 'tcx> { #[derive(Copy, Clone)] enum EnumDiscriminantInfo<'ll> { - RegularDiscriminant(&'ll DIType), + RegularDiscriminant{ discr_field: Field, discr_type_metadata: &'ll DIType }, OptimizedDiscriminant, NoDiscriminant } @@ -1535,15 +1542,26 @@ fn describe_enum_variant( unique_type_id, Some(containing_scope)); + let arg_name = |i: usize| { + if variant.ctor_kind == CtorKind::Fn { + format!("__{}", i) + } else { + variant.fields[i].ident.to_string() + } + }; + // Build an array of (field name, field type) pairs to be captured in the factory closure. let (offsets, args) = if use_enum_fallback(cx) { // If this is not a univariant enum, there is also the discriminant field. let (discr_offset, discr_arg) = match discriminant_info { - RegularDiscriminant(_) => { + RegularDiscriminant { discr_field, .. } => { // We have the layout of an enum variant, we need the layout of the outer enum let enum_layout = cx.layout_of(layout.ty); - (Some(enum_layout.fields.offset(0)), - Some(("RUST$ENUM$DISR".to_owned(), enum_layout.field(cx, 0).ty))) + let offset = enum_layout.fields.offset(discr_field.as_usize()); + let args = ( + "RUST$ENUM$DISR".to_owned(), + enum_layout.field(cx, discr_field.as_usize()).ty); + (Some(offset), Some(args)) } _ => (None, None), }; @@ -1552,12 +1570,7 @@ fn describe_enum_variant( layout.fields.offset(i) })).collect(), discr_arg.into_iter().chain((0..layout.fields.count()).map(|i| { - let name = if variant.ctor_kind == CtorKind::Fn { - format!("__{}", i) - } else { - variant.fields[i].ident.to_string() - }; - (name, layout.field(cx, i).ty) + (arg_name(i), layout.field(cx, i).ty) })).collect() ) } else { @@ -1566,12 +1579,7 @@ fn describe_enum_variant( layout.fields.offset(i) }).collect(), (0..layout.fields.count()).map(|i| { - let name = if variant.ctor_kind == CtorKind::Fn { - format!("__{}", i) - } else { - variant.fields[i].ident.to_string() - }; - (name, layout.field(cx, i).ty) + (arg_name(i), layout.field(cx, i).ty) }).collect() ) }; @@ -1581,8 +1589,8 @@ fn describe_enum_variant( offsets, args, discriminant_type_metadata: match discriminant_info { - RegularDiscriminant(discriminant_type_metadata) => { - Some(discriminant_type_metadata) + RegularDiscriminant { discr_type_metadata, .. } => { + Some(discr_type_metadata) } _ => None }, @@ -1599,7 +1607,7 @@ fn prepare_enum_metadata( unique_type_id: UniqueTypeId, span: Span, ) -> RecursiveTypeDescription<'ll, 'tcx> { - let enum_name = compute_debuginfo_type_name(cx, enum_type, false); + let enum_name = compute_debuginfo_type_name(cx.tcx, enum_type, false); let containing_scope = get_namespace_for_item(cx, enum_def_id); // FIXME: This should emit actual file metadata for the enum, but we @@ -1732,6 +1740,7 @@ fn prepare_enum_metadata( layout::Variants::Multiple { discr_kind: layout::DiscriminantKind::Niche { .. }, ref discr, + discr_index, .. } => { // Find the integer type of the correct size. @@ -1755,7 +1764,7 @@ fn prepare_enum_metadata( UNKNOWN_LINE_NUMBER, size.bits(), align.abi.bits() as u32, - layout.fields.offset(0).bits(), + layout.fields.offset(discr_index).bits(), DIFlags::FlagArtificial, discr_metadata)) } @@ -1764,6 +1773,7 @@ fn prepare_enum_metadata( layout::Variants::Multiple { discr_kind: layout::DiscriminantKind::Tag, ref discr, + discr_index, .. 
} => { let discr_type = discr.value.to_ty(cx.tcx); @@ -1779,7 +1789,7 @@ fn prepare_enum_metadata( UNKNOWN_LINE_NUMBER, size.bits(), align.bits() as u32, - layout.fields.offset(0).bits(), + layout.fields.offset(discr_index).bits(), DIFlags::FlagArtificial, discr_metadata)) } diff --git a/src/librustc_codegen_llvm/debuginfo/mod.rs b/src/librustc_codegen_llvm/debuginfo/mod.rs index 57e4ac07d5e28..ae498673c1d84 100644 --- a/src/librustc_codegen_llvm/debuginfo/mod.rs +++ b/src/librustc_codegen_llvm/debuginfo/mod.rs @@ -29,7 +29,7 @@ use rustc::util::nodemap::{DefIdMap, FxHashMap, FxHashSet}; use rustc_data_structures::small_c_str::SmallCStr; use rustc_data_structures::indexed_vec::IndexVec; use rustc_codegen_ssa::debuginfo::{FunctionDebugContext, MirDebugScope, VariableAccess, - VariableKind, FunctionDebugContextData}; + VariableKind, FunctionDebugContextData, type_names}; use libc::c_uint; use std::cell::RefCell; @@ -44,7 +44,6 @@ use rustc_codegen_ssa::traits::*; pub mod gdb; mod utils; mod namespace; -mod type_names; pub mod metadata; mod create_scope_map; mod source_loc; @@ -422,7 +421,7 @@ impl DebugInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> { let actual_type = cx.tcx.normalize_erasing_regions(ParamEnv::reveal_all(), actual_type); // Add actual type name to <...> clause of function name - let actual_type_name = compute_debuginfo_type_name(cx, + let actual_type_name = compute_debuginfo_type_name(cx.tcx(), actual_type, true); name_to_append_suffix_to.push_str(&actual_type_name[..]); diff --git a/src/librustc_codegen_llvm/lib.rs b/src/librustc_codegen_llvm/lib.rs index 0aae6b46e3def..07fcbc19dd362 100644 --- a/src/librustc_codegen_llvm/lib.rs +++ b/src/librustc_codegen_llvm/lib.rs @@ -44,8 +44,6 @@ extern crate rustc_fs_util; #[macro_use] extern crate syntax; extern crate syntax_pos; extern crate rustc_errors as errors; -extern crate serialize; -extern crate tempfile; use rustc_codegen_ssa::traits::*; use rustc_codegen_ssa::back::write::{CodegenContext, ModuleConfig, FatLTOInput}; @@ -74,13 +72,10 @@ use rustc_codegen_utils::codegen_backend::CodegenBackend; mod diagnostics; mod back { - mod archive; + pub mod archive; pub mod bytecode; - pub mod link; pub mod lto; pub mod write; - mod rpath; - pub mod wasm; } mod abi; @@ -332,8 +327,17 @@ impl CodegenBackend for LlvmCodegenBackend { // This should produce either a finished executable or library. 
sess.profiler(|p| p.start_activity(ProfileCategory::Linking, "link_crate")); time(sess, "linking", || { - back::link::link_binary(sess, &codegen_results, - outputs, &codegen_results.crate_name.as_str()); + use rustc_codegen_ssa::back::link::link_binary; + use crate::back::archive::LlvmArchiveBuilder; + + let target_cpu = crate::llvm_util::target_cpu(sess); + link_binary::<LlvmArchiveBuilder<'_>>( + sess, + &codegen_results, + outputs, + &codegen_results.crate_name.as_str(), + target_cpu, + ); }); sess.profiler(|p| p.end_activity(ProfileCategory::Linking, "link_crate")); diff --git a/src/librustc_codegen_llvm/metadata.rs b/src/librustc_codegen_llvm/metadata.rs index a2df687d58f5a..7cf497cb5d036 100644 --- a/src/librustc_codegen_llvm/metadata.rs +++ b/src/librustc_codegen_llvm/metadata.rs @@ -5,6 +5,8 @@ use rustc::middle::cstore::MetadataLoader; use rustc_target::spec::Target; use rustc_data_structures::owning_ref::OwningRef; +use rustc_codegen_ssa::METADATA_FILENAME; + use std::path::Path; use std::ptr; use std::slice; @@ -12,8 +14,6 @@ use rustc_fs_util::path_to_c_string; pub use rustc_data_structures::sync::MetadataRef; -pub const METADATA_FILENAME: &str = "rust.metadata.bin"; - pub struct LlvmMetadataLoader; impl MetadataLoader for LlvmMetadataLoader { diff --git a/src/librustc_codegen_llvm/type_of.rs b/src/librustc_codegen_llvm/type_of.rs index 020447608eebe..d42fa8291618c 100644 --- a/src/librustc_codegen_llvm/type_of.rs +++ b/src/librustc_codegen_llvm/type_of.rs @@ -452,31 +452,27 @@ impl<'tcx> LayoutLlvmExt<'tcx> for TyLayout<'tcx> { _ => { let mut data_variant = match self.variants { + // Within the discriminant field, only the niche itself is + // always initialized, so we only check for a pointer at its + // offset. + // + // If the niche is a pointer, it's either valid (according + // to its type), or null (which the niche field's scalar + // validity range encodes). This allows using + // `dereferenceable_or_null` for e.g., `Option<&T>`, and + // this will continue to work as long as we don't start + // using more niches than just null (e.g., the first page of + // the address space, or unaligned pointers). layout::Variants::Multiple { discr_kind: layout::DiscriminantKind::Niche { dataful_variant, .. }, + discr_index, .. - } => { - // Only the niche itself is always initialized, - // so only check for a pointer at its offset. - // - // If the niche is a pointer, it's either valid - // (according to its type), or null (which the - // niche field's scalar validity range encodes). - // This allows using `dereferenceable_or_null` - // for e.g., `Option<&T>`, and this will continue - // to work as long as we don't start using more - // niches than just null (e.g., the first page - // of the address space, or unaligned pointers).
- if self.fields.offset(0) == offset { - Some(self.for_variant(cx, dataful_variant)) - } else { - None - } - } - _ => Some(*self) + } if self.fields.offset(discr_index) == offset => + Some(self.for_variant(cx, dataful_variant)), + _ => Some(*self), }; if let Some(variant) = data_variant { diff --git a/src/librustc_codegen_ssa/Cargo.toml b/src/librustc_codegen_ssa/Cargo.toml index 4702e34aa19e7..af99d39182c24 100644 --- a/src/librustc_codegen_ssa/Cargo.toml +++ b/src/librustc_codegen_ssa/Cargo.toml @@ -20,6 +20,7 @@ log = "0.4.5" libc = "0.2.44" jobserver = "0.1.11" parking_lot = "0.7" +tempfile = "3.0.5" serialize = { path = "../libserialize" } syntax = { path = "../libsyntax" } diff --git a/src/librustc_codegen_ssa/back/archive.rs b/src/librustc_codegen_ssa/back/archive.rs index 0a16d1b03e2a1..23d580ef08b2d 100644 --- a/src/librustc_codegen_ssa/back/archive.rs +++ b/src/librustc_codegen_ssa/back/archive.rs @@ -1,6 +1,7 @@ use rustc::session::Session; -use std::path::PathBuf; +use std::io; +use std::path::{Path, PathBuf}; pub fn find_library(name: &str, search_paths: &[PathBuf], sess: &Session) -> PathBuf { @@ -24,3 +25,23 @@ pub fn find_library(name: &str, search_paths: &[PathBuf], sess: &Session) sess.fatal(&format!("could not find native static library `{}`, \ perhaps an -L flag is missing?", name)); } + +pub trait ArchiveBuilder<'a> { + fn new(sess: &'a Session, output: &Path, input: Option<&Path>) -> Self; + + fn add_file(&mut self, path: &Path); + fn remove_file(&mut self, name: &str); + fn src_files(&mut self) -> Vec; + + fn add_rlib( + &mut self, + path: &Path, + name: &str, + lto: bool, + skip_objects: bool, + ) -> io::Result<()>; + fn add_native_library(&mut self, name: &str); + fn update_symbols(&mut self); + + fn build(self); +} diff --git a/src/librustc_codegen_ssa/back/link.rs b/src/librustc_codegen_ssa/back/link.rs index a0e2dcd646df8..308f47d7d7ed4 100644 --- a/src/librustc_codegen_ssa/back/link.rs +++ b/src/librustc_codegen_ssa/back/link.rs @@ -1,21 +1,40 @@ /// For all the linkers we support, and information they might /// need out of the shared crate context before we get rid of it. 
-use rustc::session::{Session, config}; +use rustc::session::{Session, filesearch}; +use rustc::session::config::{ + self, RUST_CGU_EXT, DebugInfo, OutputFilenames, OutputType, PrintRequest, Sanitizer +}; use rustc::session::search_paths::PathKind; use rustc::middle::dependency_format::Linkage; -use rustc::middle::cstore::LibSource; -use rustc_target::spec::LinkerFlavor; +use rustc::middle::cstore::{LibSource, NativeLibrary, NativeLibraryKind}; +use rustc::util::common::{time, time_ext}; use rustc::hir::def_id::CrateNum; +use rustc_data_structures::fx::FxHashSet; +use rustc_fs_util::fix_windows_verbatim_for_gcc; +use rustc_target::spec::{PanicStrategy, RelroLevel, LinkerFlavor}; +use crate::{METADATA_FILENAME, RLIB_BYTECODE_EXTENSION, CrateInfo, CodegenResults}; +use super::archive::ArchiveBuilder; use super::command::Command; -use crate::CrateInfo; +use super::linker::Linker; +use super::rpath::{self, RPathConfig}; use cc::windows_registry; +use tempfile::{Builder as TempFileBuilder, TempDir}; + +use std::ascii; +use std::char; +use std::fmt; use std::fs; +use std::io; use std::path::{Path, PathBuf}; +use std::process::{Output, Stdio}; +use std::str; use std::env; +pub use rustc_codegen_utils::link::*; + pub fn remove(sess: &Session, path: &Path) { if let Err(e) = fs::remove_file(path) { sess.err(&format!("failed to remove {}: {}", @@ -24,6 +43,129 @@ pub fn remove(sess: &Session, path: &Path) { } } +/// Performs the linkage portion of the compilation phase. This will generate all +/// of the requested outputs for this compilation session. +pub fn link_binary<'a, B: ArchiveBuilder<'a>>(sess: &'a Session, + codegen_results: &CodegenResults, + outputs: &OutputFilenames, + crate_name: &str, + target_cpu: &str) -> Vec<PathBuf> { + let mut out_filenames = Vec::new(); + for &crate_type in sess.crate_types.borrow().iter() { + // Ignore executable crates if we have -Z no-codegen, as they will error.
+ let output_metadata = sess.opts.output_types.contains_key(&OutputType::Metadata); + if (sess.opts.debugging_opts.no_codegen || !sess.opts.output_types.should_codegen()) && + !output_metadata && + crate_type == config::CrateType::Executable { + continue; + } + + if invalid_output_for_target(sess, crate_type) { + bug!("invalid output type `{:?}` for target os `{}`", + crate_type, sess.opts.target_triple); + } + let out_files = link_binary_output::<B>(sess, + codegen_results, + crate_type, + outputs, + crate_name, + target_cpu); + out_filenames.extend(out_files); + } + + // Remove the temporary object file and metadata if we aren't saving temps + if !sess.opts.cg.save_temps { + if sess.opts.output_types.should_codegen() && !preserve_objects_for_their_debuginfo(sess) { + for obj in codegen_results.modules.iter().filter_map(|m| m.object.as_ref()) { + remove(sess, obj); + } + } + for obj in codegen_results.modules.iter().filter_map(|m| m.bytecode_compressed.as_ref()) { + remove(sess, obj); + } + if let Some(ref obj) = codegen_results.metadata_module.object { + remove(sess, obj); + } + if let Some(ref allocator) = codegen_results.allocator_module { + if let Some(ref obj) = allocator.object { + remove(sess, obj); + } + if let Some(ref bc) = allocator.bytecode_compressed { + remove(sess, bc); + } + } + } + + out_filenames +} + +fn link_binary_output<'a, B: ArchiveBuilder<'a>>(sess: &'a Session, + codegen_results: &CodegenResults, + crate_type: config::CrateType, + outputs: &OutputFilenames, + crate_name: &str, + target_cpu: &str) -> Vec<PathBuf> { + for obj in codegen_results.modules.iter().filter_map(|m| m.object.as_ref()) { + check_file_is_writeable(obj, sess); + } + + let mut out_filenames = vec![]; + + if outputs.outputs.contains_key(&OutputType::Metadata) { + let out_filename = filename_for_metadata(sess, crate_name, outputs); + // To avoid races with another rustc process scanning the output directory, + // we need to write the file somewhere else and atomically move it to its + // final destination, with a `fs::rename` call. In order for the rename to + // always succeed, the temporary file needs to be on the same filesystem, + // which is why we create it inside the output directory specifically.
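// The same "write elsewhere, then atomically move into place" idea in a
// standalone, std-only sketch (hypothetical helper, not part of this patch;
// the real code below uses `tempfile::Builder` and the session's error
// reporting instead):
use std::{fs, io, path::Path};

fn write_atomically(dest: &Path, data: &[u8]) -> io::Result<()> {
    // The temporary file lives next to `dest`, i.e. on the same filesystem,
    // so the final `rename` cannot fail with a cross-device error.
    let tmp = dest.with_extension("rmeta.tmp");
    fs::write(&tmp, data)?;
    // On Unix, `rename` replaces `dest` in a single step, so a concurrent
    // reader either sees the old file or the complete new one, never a
    // partially written file.
    fs::rename(&tmp, dest)
}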
+ let metadata_tmpdir = TempFileBuilder::new() + .prefix("rmeta") + .tempdir_in(out_filename.parent().unwrap()) + .unwrap_or_else(|err| sess.fatal(&format!("couldn't create a temp dir: {}", err))); + let metadata = emit_metadata(sess, codegen_results, &metadata_tmpdir); + if let Err(e) = fs::rename(metadata, &out_filename) { + sess.fatal(&format!("failed to write {}: {}", out_filename.display(), e)); + } + out_filenames.push(out_filename); + } + + let tmpdir = TempFileBuilder::new().prefix("rustc").tempdir().unwrap_or_else(|err| + sess.fatal(&format!("couldn't create a temp dir: {}", err))); + + if outputs.outputs.should_codegen() { + let out_filename = out_filename(sess, crate_type, outputs, crate_name); + match crate_type { + config::CrateType::Rlib => { + link_rlib::<B>(sess, + codegen_results, + RlibFlavor::Normal, + &out_filename, + &tmpdir).build(); + } + config::CrateType::Staticlib => { + link_staticlib::<B>(sess, codegen_results, &out_filename, &tmpdir); + } + _ => { + link_natively::<B>( + sess, + crate_type, + &out_filename, + codegen_results, + tmpdir.path(), + target_cpu, + ); + } + } + out_filenames.push(out_filename); + } + + if sess.opts.cg.save_temps { + let _ = tmpdir.into_path(); + } + + out_filenames +} + // The third parameter is for env vars, used on windows to set up the // path for MSVC to find its DLLs, and gcc to find its bundled // toolchain @@ -116,6 +258,444 @@ pub fn each_linked_rlib(sess: &Session, Ok(()) } +/// We use a temp directory here to avoid races between concurrent rustc processes, +/// such as builds in the same directory using the same filename for metadata while +/// building an `.rlib` (stomping over one another), or writing an `.rmeta` into a +/// directory being searched for `extern crate` (observing an incomplete file). +/// The returned path is the temporary file containing the complete metadata. +fn emit_metadata<'a>( + sess: &'a Session, + codegen_results: &CodegenResults, + tmpdir: &TempDir +) -> PathBuf { + let out_filename = tmpdir.path().join(METADATA_FILENAME); + let result = fs::write(&out_filename, &codegen_results.metadata.raw_data); + + if let Err(e) = result { + sess.fatal(&format!("failed to write {}: {}", out_filename.display(), e)); + } + + out_filename +} + +// Create an 'rlib' +// +// An rlib in its current incarnation is essentially a renamed .a file. The +// rlib primarily contains the object file of the crate, but it also contains +// all of the object files from native libraries. This is done by unzipping +// native libraries and inserting all of the contents into this archive. +fn link_rlib<'a, B: ArchiveBuilder<'a>>(sess: &'a Session, + codegen_results: &CodegenResults, + flavor: RlibFlavor, + out_filename: &Path, + tmpdir: &TempDir) -> B { + info!("preparing rlib to {:?}", out_filename); + let mut ab = <B as ArchiveBuilder>::new(sess, out_filename, None); + + for obj in codegen_results.modules.iter().filter_map(|m| m.object.as_ref()) { + ab.add_file(obj); + } + + // Note that in this loop we are ignoring the value of `lib.cfg`. That is, + // we may not be configured to actually include a static library if we're + // adding it here. That's because later when we consume this rlib we'll + // decide whether we actually needed the static library or not. + // + // To do this "correctly" we'd need to keep track of which libraries added + // which object files to the archive. We don't do that here, however. The + // #[link(cfg(..))] feature is unstable, though, and only intended to get + // liblibc working.
In that sense the check below just indicates that if + // there are any libraries we want to omit object files for at link time we + // just exclude all custom object files. + // + // Eventually if we want to stabilize or flesh out the #[link(cfg(..))] + // feature then we'll need to figure out how to record what objects were + // loaded from the libraries found here and then encode that into the + // metadata of the rlib we're generating somehow. + for lib in codegen_results.crate_info.used_libraries.iter() { + match lib.kind { + NativeLibraryKind::NativeStatic => {} + NativeLibraryKind::NativeStaticNobundle | + NativeLibraryKind::NativeFramework | + NativeLibraryKind::NativeUnknown => continue, + } + if let Some(name) = lib.name { + ab.add_native_library(&name.as_str()); + } + } + + // After adding all files to the archive, we need to update the + // symbol table of the archive. + ab.update_symbols(); + + // Note that it is important that we add all of our non-object "magical + // files" *after* all of the object files in the archive. The reason for + // this is as follows: + // + // * When performing LTO, this archive will be modified to remove + // objects from above. The reason for this is described below. + // + // * When the system linker looks at an archive, it will attempt to + // determine the architecture of the archive in order to see whether its + // linkable. + // + // The algorithm for this detection is: iterate over the files in the + // archive. Skip magical SYMDEF names. Interpret the first file as an + // object file. Read architecture from the object file. + // + // * As one can probably see, if "metadata" and "foo.bc" were placed + // before all of the objects, then the architecture of this archive would + // not be correctly inferred once 'foo.o' is removed. + // + // Basically, all this means is that this code should not move above the + // code above. + match flavor { + RlibFlavor::Normal => { + // Instead of putting the metadata in an object file section, rlibs + // contain the metadata in a separate file. + ab.add_file(&emit_metadata(sess, codegen_results, tmpdir)); + + // For LTO purposes, the bytecode of this library is also inserted + // into the archive. + for bytecode in codegen_results + .modules + .iter() + .filter_map(|m| m.bytecode_compressed.as_ref()) + { + ab.add_file(bytecode); + } + + // After adding all files to the archive, we need to update the + // symbol table of the archive. This currently dies on macOS (see + // #11162), and isn't necessary there anyway + if !sess.target.target.options.is_like_osx { + ab.update_symbols(); + } + } + + RlibFlavor::StaticlibBase => { + let obj = codegen_results.allocator_module + .as_ref() + .and_then(|m| m.object.as_ref()); + if let Some(obj) = obj { + ab.add_file(obj); + } + } + } + + ab +} + +// Create a static archive +// +// This is essentially the same thing as an rlib, but it also involves adding +// all of the upstream crates' objects into the archive. This will slurp in +// all of the native libraries of upstream dependencies as well. +// +// Additionally, there's no way for us to link dynamic libraries, so we warn +// about all dynamic library dependencies that they're not linked in. +// +// There's no need to include metadata in a static archive, so ensure to not +// link in the metadata object file (and also don't prepare the archive with a +// metadata file). 
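// To make the member-ordering rule above concrete, a finished rlib produced by
// link_rlib() contains roughly the following members (file names here are
// hypothetical; only rust.metadata.bin is the literal METADATA_FILENAME):
const EXAMPLE_RLIB_MEMBERS: &[&str] = &[
    "foo.abc123-cgu.0.rcgu.o",    // codegen-unit object files come first, so
    "foo.abc123-cgu.1.rcgu.o",    // architecture sniffing still sees an object,
    "rust.metadata.bin",          // then the crate metadata,
    "foo.abc123-cgu.0.rcgu.bc.z", // then compressed bytecode kept for LTO.
];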
+fn link_staticlib<'a, B: ArchiveBuilder<'a>>(sess: &'a Session, + codegen_results: &CodegenResults, + out_filename: &Path, + tempdir: &TempDir) { + let mut ab = link_rlib::(sess, + codegen_results, + RlibFlavor::StaticlibBase, + out_filename, + tempdir); + let mut all_native_libs = vec![]; + + let res = each_linked_rlib(sess, &codegen_results.crate_info, &mut |cnum, path| { + let name = &codegen_results.crate_info.crate_name[&cnum]; + let native_libs = &codegen_results.crate_info.native_libraries[&cnum]; + + // Here when we include the rlib into our staticlib we need to make a + // decision whether to include the extra object files along the way. + // These extra object files come from statically included native + // libraries, but they may be cfg'd away with #[link(cfg(..))]. + // + // This unstable feature, though, only needs liblibc to work. The only + // use case there is where musl is statically included in liblibc.rlib, + // so if we don't want the included version we just need to skip it. As + // a result the logic here is that if *any* linked library is cfg'd away + // we just skip all object files. + // + // Clearly this is not sufficient for a general purpose feature, and + // we'd want to read from the library's metadata to determine which + // object files come from where and selectively skip them. + let skip_object_files = native_libs.iter().any(|lib| { + lib.kind == NativeLibraryKind::NativeStatic && !relevant_lib(sess, lib) + }); + ab.add_rlib(path, + &name.as_str(), + are_upstream_rust_objects_already_included(sess) && + !ignored_for_lto(sess, &codegen_results.crate_info, cnum), + skip_object_files).unwrap(); + + all_native_libs.extend(codegen_results.crate_info.native_libraries[&cnum].iter().cloned()); + }); + if let Err(e) = res { + sess.fatal(&e); + } + + ab.update_symbols(); + ab.build(); + + if !all_native_libs.is_empty() { + if sess.opts.prints.contains(&PrintRequest::NativeStaticLibs) { + print_native_static_libs(sess, &all_native_libs); + } + } +} + +// Create a dynamic library or executable +// +// This will invoke the system linker/cc to create the resulting file. This +// links to all upstream files as well. 
+fn link_natively<'a, B: ArchiveBuilder<'a>>(sess: &'a Session, + crate_type: config::CrateType, + out_filename: &Path, + codegen_results: &CodegenResults, + tmpdir: &Path, + target_cpu: &str) { + info!("preparing {:?} to {:?}", crate_type, out_filename); + let (linker, flavor) = linker_and_flavor(sess); + + // The invocations of cc share some flags across platforms + let (pname, mut cmd) = get_linker(sess, &linker, flavor); + + if let Some(args) = sess.target.target.options.pre_link_args.get(&flavor) { + cmd.args(args); + } + if let Some(args) = sess.target.target.options.pre_link_args_crt.get(&flavor) { + if sess.crt_static() { + cmd.args(args); + } + } + if let Some(ref args) = sess.opts.debugging_opts.pre_link_args { + cmd.args(args); + } + cmd.args(&sess.opts.debugging_opts.pre_link_arg); + + if sess.target.target.options.is_like_fuchsia { + let prefix = match sess.opts.debugging_opts.sanitizer { + Some(Sanitizer::Address) => "asan/", + _ => "", + }; + cmd.arg(format!("--dynamic-linker={}ld.so.1", prefix)); + } + + let pre_link_objects = if crate_type == config::CrateType::Executable { + &sess.target.target.options.pre_link_objects_exe + } else { + &sess.target.target.options.pre_link_objects_dll + }; + for obj in pre_link_objects { + cmd.arg(get_file_path(sess, obj)); + } + + if crate_type == config::CrateType::Executable && sess.crt_static() { + for obj in &sess.target.target.options.pre_link_objects_exe_crt { + cmd.arg(get_file_path(sess, obj)); + } + } + + if sess.target.target.options.is_like_emscripten { + cmd.arg("-s"); + cmd.arg(if sess.panic_strategy() == PanicStrategy::Abort { + "DISABLE_EXCEPTION_CATCHING=1" + } else { + "DISABLE_EXCEPTION_CATCHING=0" + }); + } + + { + let mut linker = codegen_results.linker_info.to_linker(cmd, &sess, flavor, target_cpu); + link_args::(&mut *linker, flavor, sess, crate_type, tmpdir, + out_filename, codegen_results); + cmd = linker.finalize(); + } + if let Some(args) = sess.target.target.options.late_link_args.get(&flavor) { + cmd.args(args); + } + for obj in &sess.target.target.options.post_link_objects { + cmd.arg(get_file_path(sess, obj)); + } + if sess.crt_static() { + for obj in &sess.target.target.options.post_link_objects_crt { + cmd.arg(get_file_path(sess, obj)); + } + } + if let Some(args) = sess.target.target.options.post_link_args.get(&flavor) { + cmd.args(args); + } + for &(ref k, ref v) in &sess.target.target.options.link_env { + cmd.env(k, v); + } + + if sess.opts.debugging_opts.print_link_args { + println!("{:?}", &cmd); + } + + // May have not found libraries in the right formats. + sess.abort_if_errors(); + + // Invoke the system linker + // + // Note that there's a terribly awful hack that really shouldn't be present + // in any compiler. Here an environment variable is supported to + // automatically retry the linker invocation if the linker looks like it + // segfaulted. + // + // Gee that seems odd, normally segfaults are things we want to know about! + // Unfortunately though in rust-lang/rust#38878 we're experiencing the + // linker segfaulting on Travis quite a bit which is causing quite a bit of + // pain to land PRs when they spuriously fail due to a segfault. + // + // The issue #38878 has some more debugging information on it as well, but + // this unfortunately looks like it's just a race condition in macOS's linker + // with some thread pool working in the background. It seems that no one + // currently knows a fix for this so in the meantime we're left with this... 
+ info!("{:?}", &cmd); + let retry_on_segfault = env::var("RUSTC_RETRY_LINKER_ON_SEGFAULT").is_ok(); + let mut prog; + let mut i = 0; + loop { + i += 1; + prog = time(sess, "running linker", || { + exec_linker(sess, &mut cmd, out_filename, tmpdir) + }); + let output = match prog { + Ok(ref output) => output, + Err(_) => break, + }; + if output.status.success() { + break + } + let mut out = output.stderr.clone(); + out.extend(&output.stdout); + let out = String::from_utf8_lossy(&out); + + // Check to see if the link failed with "unrecognized command line option: + // '-no-pie'" for gcc or "unknown argument: '-no-pie'" for clang. If so, + // reperform the link step without the -no-pie option. This is safe because + // if the linker doesn't support -no-pie then it should not default to + // linking executables as pie. Different versions of gcc seem to use + // different quotes in the error message so don't check for them. + if sess.target.target.options.linker_is_gnu && + flavor != LinkerFlavor::Ld && + (out.contains("unrecognized command line option") || + out.contains("unknown argument")) && + out.contains("-no-pie") && + cmd.get_args().iter().any(|e| e.to_string_lossy() == "-no-pie") { + info!("linker output: {:?}", out); + warn!("Linker does not support -no-pie command line option. Retrying without."); + for arg in cmd.take_args() { + if arg.to_string_lossy() != "-no-pie" { + cmd.arg(arg); + } + } + info!("{:?}", &cmd); + continue; + } + if !retry_on_segfault || i > 3 { + break + } + let msg_segv = "clang: error: unable to execute command: Segmentation fault: 11"; + let msg_bus = "clang: error: unable to execute command: Bus error: 10"; + if !(out.contains(msg_segv) || out.contains(msg_bus)) { + break + } + + warn!( + "looks like the linker segfaulted when we tried to call it, \ + automatically retrying again. cmd = {:?}, out = {}.", + cmd, + out, + ); + } + + match prog { + Ok(prog) => { + fn escape_string(s: &[u8]) -> String { + str::from_utf8(s).map(|s| s.to_owned()) + .unwrap_or_else(|_| { + let mut x = "Non-UTF-8 output: ".to_string(); + x.extend(s.iter() + .flat_map(|&b| ascii::escape_default(b)) + .map(char::from)); + x + }) + } + if !prog.status.success() { + let mut output = prog.stderr.clone(); + output.extend_from_slice(&prog.stdout); + sess.struct_err(&format!("linking with `{}` failed: {}", + pname.display(), + prog.status)) + .note(&format!("{:?}", &cmd)) + .note(&escape_string(&output)) + .emit(); + sess.abort_if_errors(); + } + info!("linker stderr:\n{}", escape_string(&prog.stderr)); + info!("linker stdout:\n{}", escape_string(&prog.stdout)); + }, + Err(e) => { + let linker_not_found = e.kind() == io::ErrorKind::NotFound; + + let mut linker_error = { + if linker_not_found { + sess.struct_err(&format!("linker `{}` not found", pname.display())) + } else { + sess.struct_err(&format!("could not exec the linker `{}`", pname.display())) + } + }; + + linker_error.note(&e.to_string()); + + if !linker_not_found { + linker_error.note(&format!("{:?}", &cmd)); + } + + linker_error.emit(); + + if sess.target.target.options.is_like_msvc && linker_not_found { + sess.note_without_error("the msvc targets depend on the msvc linker \ + but `link.exe` was not found"); + sess.note_without_error("please ensure that VS 2013, VS 2015 or VS 2017 \ + was installed with the Visual C++ option"); + } + sess.abort_if_errors(); + } + } + + + // On macOS, debuggers need this utility to get run to do some munging of + // the symbols. 
Note, though, that if the object files are being preserved + // for their debug information there's no need for us to run dsymutil. + if sess.target.target.options.is_like_osx && + sess.opts.debuginfo != DebugInfo::None && + !preserve_objects_for_their_debuginfo(sess) + { + if let Err(e) = Command::new("dsymutil").arg(out_filename).output() { + sess.fatal(&format!("failed to run dsymutil: {}", e)) + } + } + + if sess.opts.target_triple.triple() == "wasm32-unknown-unknown" { + super::wasm::add_producer_section( + &out_filename, + &sess.edition().to_string(), + option_env!("CFG_VERSION").unwrap_or("unknown"), + ); + } +} + /// Returns a boolean indicating whether the specified crate should be ignored /// during LTO. /// @@ -200,3 +780,898 @@ pub fn linker_and_flavor(sess: &Session) -> (PathBuf, LinkerFlavor) { bug!("Not enough information provided to determine how to invoke the linker"); } + +/// Returns a boolean indicating whether we should preserve the object files on +/// the filesystem for their debug information. This is often useful with +/// split-dwarf like schemes. +pub fn preserve_objects_for_their_debuginfo(sess: &Session) -> bool { + // If the objects don't have debuginfo there's nothing to preserve. + if sess.opts.debuginfo == config::DebugInfo::None { + return false + } + + // If we're only producing artifacts that are archives, no need to preserve + // the objects as they're losslessly contained inside the archives. + let output_linked = sess.crate_types.borrow() + .iter() + .any(|&x| x != config::CrateType::Rlib && x != config::CrateType::Staticlib); + if !output_linked { + return false + } + + // If we're on OSX then the equivalent of split dwarf is turned on by + // default. The final executable won't actually have any debug information + // except it'll have pointers to elsewhere. Historically we've always run + // `dsymutil` to "link all the dwarf together" but this is actually sort of + // a bummer for incremental compilation! (the whole point of split dwarf is + // that you don't do this sort of dwarf link). + // + // Basically as a result this just means that if we're on OSX and we're + // *not* running dsymutil then the object files are the only source of truth + // for debug information, so we must preserve them. + if sess.target.target.options.is_like_osx { + match sess.opts.debugging_opts.run_dsymutil { + // dsymutil is not being run, preserve objects + Some(false) => return true, + + // dsymutil is being run, no need to preserve the objects + Some(true) => return false, + + // The default historical behavior was to always run dsymutil, so + // we're preserving that temporarily, but we're likely to switch the + // default soon. 
+ None => return false, + } + } + + false +} + +pub fn archive_search_paths(sess: &Session) -> Vec { + sess.target_filesearch(PathKind::Native).search_path_dirs() +} + +enum RlibFlavor { + Normal, + StaticlibBase, +} + +pub fn print_native_static_libs(sess: &Session, all_native_libs: &[NativeLibrary]) { + let lib_args: Vec<_> = all_native_libs.iter() + .filter(|l| relevant_lib(sess, l)) + .filter_map(|lib| { + let name = lib.name?; + match lib.kind { + NativeLibraryKind::NativeStaticNobundle | + NativeLibraryKind::NativeUnknown => { + if sess.target.target.options.is_like_msvc { + Some(format!("{}.lib", name)) + } else { + Some(format!("-l{}", name)) + } + }, + NativeLibraryKind::NativeFramework => { + // ld-only syntax, since there are no frameworks in MSVC + Some(format!("-framework {}", name)) + }, + // These are included, no need to print them + NativeLibraryKind::NativeStatic => None, + } + }) + .collect(); + if !lib_args.is_empty() { + sess.note_without_error("Link against the following native artifacts when linking \ + against this static library. The order and any duplication \ + can be significant on some platforms."); + // Prefix for greppability + sess.note_without_error(&format!("native-static-libs: {}", &lib_args.join(" "))); + } +} + +pub fn get_file_path(sess: &Session, name: &str) -> PathBuf { + let fs = sess.target_filesearch(PathKind::Native); + let file_path = fs.get_lib_path().join(name); + if file_path.exists() { + return file_path + } + for search_path in fs.search_paths() { + let file_path = search_path.dir.join(name); + if file_path.exists() { + return file_path + } + } + PathBuf::from(name) +} + +pub fn exec_linker(sess: &Session, cmd: &mut Command, out_filename: &Path, tmpdir: &Path) + -> io::Result +{ + // When attempting to spawn the linker we run a risk of blowing out the + // size limits for spawning a new process with respect to the arguments + // we pass on the command line. + // + // Here we attempt to handle errors from the OS saying "your list of + // arguments is too big" by reinvoking the linker again with an `@`-file + // that contains all the arguments. The theory is that this is then + // accepted on all linkers and the linker will read all its options out of + // there instead of looking at the command line. 
+    if !cmd.very_likely_to_exceed_some_spawn_limit() {
+        match cmd.command().stdout(Stdio::piped()).stderr(Stdio::piped()).spawn() {
+            Ok(child) => {
+                let output = child.wait_with_output();
+                flush_linked_file(&output, out_filename)?;
+                return output;
+            }
+            Err(ref e) if command_line_too_big(e) => {
+                info!("command line to linker was too big: {}", e);
+            }
+            Err(e) => return Err(e)
+        }
+    }
+
+    info!("falling back to passing arguments to linker via an @-file");
+    let mut cmd2 = cmd.clone();
+    let mut args = String::new();
+    for arg in cmd2.take_args() {
+        args.push_str(&Escape {
+            arg: arg.to_str().unwrap(),
+            is_like_msvc: sess.target.target.options.is_like_msvc,
+        }.to_string());
+        args.push_str("\n");
+    }
+    let file = tmpdir.join("linker-arguments");
+    let bytes = if sess.target.target.options.is_like_msvc {
+        let mut out = Vec::with_capacity((1 + args.len()) * 2);
+        // start the stream with a UTF-16 BOM
+        for c in std::iter::once(0xFEFF).chain(args.encode_utf16()) {
+            // encode in little endian
+            out.push(c as u8);
+            out.push((c >> 8) as u8);
+        }
+        out
+    } else {
+        args.into_bytes()
+    };
+    fs::write(&file, &bytes)?;
+    cmd2.arg(format!("@{}", file.display()));
+    info!("invoking linker {:?}", cmd2);
+    let output = cmd2.output();
+    flush_linked_file(&output, out_filename)?;
+    return output;
+
+    #[cfg(unix)]
+    fn flush_linked_file(_: &io::Result<Output>, _: &Path) -> io::Result<()> {
+        Ok(())
+    }
+
+    #[cfg(windows)]
+    fn flush_linked_file(command_output: &io::Result<Output>, out_filename: &Path)
+        -> io::Result<()>
+    {
+        // On Windows, under high I/O load, output buffers are sometimes not flushed,
+        // even long after process exit, causing nasty, non-reproducible output bugs.
+        //
+        // File::sync_all() calls FlushFileBuffers() down the line, which solves the problem.
+        //
+        // A full writeup of the original Chrome bug can be found at
+        // randomascii.wordpress.com/2018/02/25/compiler-bug-linker-bug-windows-kernel-bug/amp
+
+        if let &Ok(ref out) = command_output {
+            if out.status.success() {
+                if let Ok(of) = fs::OpenOptions::new().write(true).open(out_filename) {
+                    of.sync_all()?;
+                }
+            }
+        }
+
+        Ok(())
+    }
+
+    #[cfg(unix)]
+    fn command_line_too_big(err: &io::Error) -> bool {
+        err.raw_os_error() == Some(::libc::E2BIG)
+    }
+
+    #[cfg(windows)]
+    fn command_line_too_big(err: &io::Error) -> bool {
+        const ERROR_FILENAME_EXCED_RANGE: i32 = 206;
+        err.raw_os_error() == Some(ERROR_FILENAME_EXCED_RANGE)
+    }
+
+    struct Escape<'a> {
+        arg: &'a str,
+        is_like_msvc: bool,
+    }
+
+    impl<'a> fmt::Display for Escape<'a> {
+        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+            if self.is_like_msvc {
+                // This is "documented" at
+                // https://msdn.microsoft.com/en-us/library/4xdcbak7.aspx
+                //
+                // Unfortunately there's not a great specification of the
+                // syntax I could find online (at least) but some local
+                // testing showed that this seemed sufficient-ish to catch
+                // at least a few edge cases.
+                write!(f, "\"")?;
+                for c in self.arg.chars() {
+                    match c {
+                        '"' => write!(f, "\\{}", c)?,
+                        c => write!(f, "{}", c)?,
+                    }
+                }
+                write!(f, "\"")?;
+            } else {
+                // This is documented at https://linux.die.net/man/1/ld, namely:
+                //
+                // > Options in file are separated by whitespace. A whitespace
+                // > character may be included in an option by surrounding the
+                // > entire option in either single or double quotes. Any
+                // > character (including a backslash) may be included by
+                // > prefixing the character to be included with a backslash.
+ // + // We put an argument on each line, so all we need to do is + // ensure the line is interpreted as one whole argument. + for c in self.arg.chars() { + match c { + '\\' | ' ' => write!(f, "\\{}", c)?, + c => write!(f, "{}", c)?, + } + } + } + Ok(()) + } + } +} + +fn link_args<'a, B: ArchiveBuilder<'a>>(cmd: &mut dyn Linker, + flavor: LinkerFlavor, + sess: &'a Session, + crate_type: config::CrateType, + tmpdir: &Path, + out_filename: &Path, + codegen_results: &CodegenResults) { + + // Linker plugins should be specified early in the list of arguments + cmd.linker_plugin_lto(); + + // The default library location, we need this to find the runtime. + // The location of crates will be determined as needed. + let lib_path = sess.target_filesearch(PathKind::All).get_lib_path(); + + // target descriptor + let t = &sess.target.target; + + cmd.include_path(&fix_windows_verbatim_for_gcc(&lib_path)); + for obj in codegen_results.modules.iter().filter_map(|m| m.object.as_ref()) { + cmd.add_object(obj); + } + cmd.output_filename(out_filename); + + if crate_type == config::CrateType::Executable && + sess.target.target.options.is_like_windows { + if let Some(ref s) = codegen_results.windows_subsystem { + cmd.subsystem(s); + } + } + + // If we're building a dynamic library then some platforms need to make sure + // that all symbols are exported correctly from the dynamic library. + if crate_type != config::CrateType::Executable || + sess.target.target.options.is_like_emscripten { + cmd.export_symbols(tmpdir, crate_type); + } + + // When linking a dynamic library, we put the metadata into a section of the + // executable. This metadata is in a separate object file from the main + // object file, so we link that in here. + if crate_type == config::CrateType::Dylib || + crate_type == config::CrateType::ProcMacro { + if let Some(obj) = codegen_results.metadata_module.object.as_ref() { + cmd.add_object(obj); + } + } + + let obj = codegen_results.allocator_module + .as_ref() + .and_then(|m| m.object.as_ref()); + if let Some(obj) = obj { + cmd.add_object(obj); + } + + // Try to strip as much out of the generated object by removing unused + // sections if possible. See more comments in linker.rs + if !sess.opts.cg.link_dead_code { + let keep_metadata = crate_type == config::CrateType::Dylib; + cmd.gc_sections(keep_metadata); + } + + let used_link_args = &codegen_results.crate_info.link_args; + + if crate_type == config::CrateType::Executable { + let mut position_independent_executable = false; + + if t.options.position_independent_executables { + let empty_vec = Vec::new(); + let args = sess.opts.cg.link_args.as_ref().unwrap_or(&empty_vec); + let more_args = &sess.opts.cg.link_arg; + let mut args = args.iter().chain(more_args.iter()).chain(used_link_args.iter()); + + if is_pic(sess) && !sess.crt_static() && !args.any(|x| *x == "-static") { + position_independent_executable = true; + } + } + + if position_independent_executable { + cmd.position_independent_executable(); + } else { + // recent versions of gcc can be configured to generate position + // independent executables by default. We have to pass -no-pie to + // explicitly turn that off. Not applicable to ld. 
+ if sess.target.target.options.linker_is_gnu + && flavor != LinkerFlavor::Ld { + cmd.no_position_independent_executable(); + } + } + } + + let relro_level = match sess.opts.debugging_opts.relro_level { + Some(level) => level, + None => t.options.relro_level, + }; + match relro_level { + RelroLevel::Full => { + cmd.full_relro(); + }, + RelroLevel::Partial => { + cmd.partial_relro(); + }, + RelroLevel::Off => { + cmd.no_relro(); + }, + RelroLevel::None => { + }, + } + + // Pass optimization flags down to the linker. + cmd.optimize(); + + // Pass debuginfo flags down to the linker. + cmd.debuginfo(); + + // We want to, by default, prevent the compiler from accidentally leaking in + // any system libraries, so we may explicitly ask linkers to not link to any + // libraries by default. Note that this does not happen for windows because + // windows pulls in some large number of libraries and I couldn't quite + // figure out which subset we wanted. + // + // This is all naturally configurable via the standard methods as well. + if !sess.opts.cg.default_linker_libraries.unwrap_or(false) && + t.options.no_default_libraries + { + cmd.no_default_libraries(); + } + + // Take careful note of the ordering of the arguments we pass to the linker + // here. Linkers will assume that things on the left depend on things to the + // right. Things on the right cannot depend on things on the left. This is + // all formally implemented in terms of resolving symbols (libs on the right + // resolve unknown symbols of libs on the left, but not vice versa). + // + // For this reason, we have organized the arguments we pass to the linker as + // such: + // + // 1. The local object that LLVM just generated + // 2. Local native libraries + // 3. Upstream rust libraries + // 4. Upstream native libraries + // + // The rationale behind this ordering is that those items lower down in the + // list can't depend on items higher up in the list. For example nothing can + // depend on what we just generated (e.g., that'd be a circular dependency). + // Upstream rust libraries are not allowed to depend on our local native + // libraries as that would violate the structure of the DAG, in that + // scenario they are required to link to them as well in a shared fashion. + // + // Note that upstream rust libraries may contain native dependencies as + // well, but they also can't depend on what we just started to add to the + // link line. And finally upstream native libraries can't depend on anything + // in this DAG so far because they're only dylibs and dylibs can only depend + // on other dylibs (e.g., other native deps). + add_local_native_libraries(cmd, sess, codegen_results); + add_upstream_rust_crates::(cmd, sess, codegen_results, crate_type, tmpdir); + add_upstream_native_libraries(cmd, sess, codegen_results, crate_type); + + // Tell the linker what we're doing. 
+ if crate_type != config::CrateType::Executable { + cmd.build_dylib(out_filename); + } + if crate_type == config::CrateType::Executable && sess.crt_static() { + cmd.build_static_executable(); + } + + if sess.opts.debugging_opts.pgo_gen.is_some() { + cmd.pgo_gen(); + } + + // FIXME (#2397): At some point we want to rpath our guesses as to + // where extern libraries might live, based on the + // addl_lib_search_paths + if sess.opts.cg.rpath { + let target_triple = sess.opts.target_triple.triple(); + let mut get_install_prefix_lib_path = || { + let install_prefix = option_env!("CFG_PREFIX").expect("CFG_PREFIX"); + let tlib = filesearch::relative_target_lib_path(&sess.sysroot, target_triple); + let mut path = PathBuf::from(install_prefix); + path.push(&tlib); + + path + }; + let mut rpath_config = RPathConfig { + used_crates: &codegen_results.crate_info.used_crates_dynamic, + out_filename: out_filename.to_path_buf(), + has_rpath: sess.target.target.options.has_rpath, + is_like_osx: sess.target.target.options.is_like_osx, + linker_is_gnu: sess.target.target.options.linker_is_gnu, + get_install_prefix_lib_path: &mut get_install_prefix_lib_path, + }; + cmd.args(&rpath::get_rpath_flags(&mut rpath_config)); + } + + // Finally add all the linker arguments provided on the command line along + // with any #[link_args] attributes found inside the crate + if let Some(ref args) = sess.opts.cg.link_args { + cmd.args(args); + } + cmd.args(&sess.opts.cg.link_arg); + cmd.args(&used_link_args); +} + +// # Native library linking +// +// User-supplied library search paths (-L on the command line). These are +// the same paths used to find Rust crates, so some of them may have been +// added already by the previous crate linking code. This only allows them +// to be found at compile time so it is still entirely up to outside +// forces to make sure that library can be found at runtime. +// +// Also note that the native libraries linked here are only the ones located +// in the current crate. Upstream crates with native library dependencies +// may have their native library pulled in above. +pub fn add_local_native_libraries(cmd: &mut dyn Linker, + sess: &Session, + codegen_results: &CodegenResults) { + let filesearch = sess.target_filesearch(PathKind::All); + for search_path in filesearch.search_paths() { + match search_path.kind { + PathKind::Framework => { cmd.framework_path(&search_path.dir); } + _ => { cmd.include_path(&fix_windows_verbatim_for_gcc(&search_path.dir)); } + } + } + + let relevant_libs = codegen_results.crate_info.used_libraries.iter().filter(|l| { + relevant_lib(sess, l) + }); + + let search_path = archive_search_paths(sess); + for lib in relevant_libs { + let name = match lib.name { + Some(ref l) => l, + None => continue, + }; + match lib.kind { + NativeLibraryKind::NativeUnknown => cmd.link_dylib(&name.as_str()), + NativeLibraryKind::NativeFramework => cmd.link_framework(&name.as_str()), + NativeLibraryKind::NativeStaticNobundle => cmd.link_staticlib(&name.as_str()), + NativeLibraryKind::NativeStatic => cmd.link_whole_staticlib(&name.as_str(), + &search_path) + } + } +} + +// # Rust Crate linking +// +// Rust crates are not considered at all when creating an rlib output. 
All +// dependencies will be linked when producing the final output (instead of +// the intermediate rlib version) +fn add_upstream_rust_crates<'a, B: ArchiveBuilder<'a>>(cmd: &mut dyn Linker, + sess: &'a Session, + codegen_results: &CodegenResults, + crate_type: config::CrateType, + tmpdir: &Path) { + // All of the heavy lifting has previously been accomplished by the + // dependency_format module of the compiler. This is just crawling the + // output of that module, adding crates as necessary. + // + // Linking to a rlib involves just passing it to the linker (the linker + // will slurp up the object files inside), and linking to a dynamic library + // involves just passing the right -l flag. + + let formats = sess.dependency_formats.borrow(); + let data = formats.get(&crate_type).unwrap(); + + // Invoke get_used_crates to ensure that we get a topological sorting of + // crates. + let deps = &codegen_results.crate_info.used_crates_dynamic; + + // There's a few internal crates in the standard library (aka libcore and + // libstd) which actually have a circular dependence upon one another. This + // currently arises through "weak lang items" where libcore requires things + // like `rust_begin_unwind` but libstd ends up defining it. To get this + // circular dependence to work correctly in all situations we'll need to be + // sure to correctly apply the `--start-group` and `--end-group` options to + // GNU linkers, otherwise if we don't use any other symbol from the standard + // library it'll get discarded and the whole application won't link. + // + // In this loop we're calculating the `group_end`, after which crate to + // pass `--end-group` and `group_start`, before which crate to pass + // `--start-group`. We currently do this by passing `--end-group` after + // the first crate (when iterating backwards) that requires a lang item + // defined somewhere else. Once that's set then when we've defined all the + // necessary lang items we'll pass `--start-group`. + // + // Note that this isn't amazing logic for now but it should do the trick + // for the current implementation of the standard library. + let mut group_end = None; + let mut group_start = None; + let mut end_with = FxHashSet::default(); + let info = &codegen_results.crate_info; + for &(cnum, _) in deps.iter().rev() { + if let Some(missing) = info.missing_lang_items.get(&cnum) { + end_with.extend(missing.iter().cloned()); + if end_with.len() > 0 && group_end.is_none() { + group_end = Some(cnum); + } + } + end_with.retain(|item| info.lang_item_to_crate.get(item) != Some(&cnum)); + if end_with.len() == 0 && group_end.is_some() { + group_start = Some(cnum); + break + } + } + + // If we didn't end up filling in all lang items from upstream crates then + // we'll be filling it in with our crate. This probably means we're the + // standard library itself, so skip this for now. + if group_end.is_some() && group_start.is_none() { + group_end = None; + } + + let mut compiler_builtins = None; + + for &(cnum, _) in deps.iter() { + if group_start == Some(cnum) { + cmd.group_start(); + } + + // We may not pass all crates through to the linker. Some crates may + // appear statically in an existing dylib, meaning we'll pick up all the + // symbols from the dylib. 
+ let src = &codegen_results.crate_info.used_crate_source[&cnum]; + match data[cnum.as_usize() - 1] { + _ if codegen_results.crate_info.profiler_runtime == Some(cnum) => { + add_static_crate::(cmd, sess, codegen_results, tmpdir, crate_type, cnum); + } + _ if codegen_results.crate_info.sanitizer_runtime == Some(cnum) => { + link_sanitizer_runtime::(cmd, sess, codegen_results, tmpdir, cnum); + } + // compiler-builtins are always placed last to ensure that they're + // linked correctly. + _ if codegen_results.crate_info.compiler_builtins == Some(cnum) => { + assert!(compiler_builtins.is_none()); + compiler_builtins = Some(cnum); + } + Linkage::NotLinked | + Linkage::IncludedFromDylib => {} + Linkage::Static => { + add_static_crate::(cmd, sess, codegen_results, tmpdir, crate_type, cnum); + } + Linkage::Dynamic => { + add_dynamic_crate(cmd, sess, &src.dylib.as_ref().unwrap().0) + } + } + + if group_end == Some(cnum) { + cmd.group_end(); + } + } + + // compiler-builtins are always placed last to ensure that they're + // linked correctly. + // We must always link the `compiler_builtins` crate statically. Even if it + // was already "included" in a dylib (e.g., `libstd` when `-C prefer-dynamic` + // is used) + if let Some(cnum) = compiler_builtins { + add_static_crate::(cmd, sess, codegen_results, tmpdir, crate_type, cnum); + } + + // Converts a library file-stem into a cc -l argument + fn unlib<'a>(config: &config::Config, stem: &'a str) -> &'a str { + if stem.starts_with("lib") && !config.target.options.is_like_windows { + &stem[3..] + } else { + stem + } + } + + // We must link the sanitizer runtime using -Wl,--whole-archive but since + // it's packed in a .rlib, it contains stuff that are not objects that will + // make the linker error. So we must remove those bits from the .rlib before + // linking it. + fn link_sanitizer_runtime<'a, B: ArchiveBuilder<'a>>(cmd: &mut dyn Linker, + sess: &'a Session, + codegen_results: &CodegenResults, + tmpdir: &Path, + cnum: CrateNum) { + let src = &codegen_results.crate_info.used_crate_source[&cnum]; + let cratepath = &src.rlib.as_ref().unwrap().0; + + if sess.target.target.options.is_like_osx { + // On Apple platforms, the sanitizer is always built as a dylib, and + // LLVM will link to `@rpath/*.dylib`, so we need to specify an + // rpath to the library as well (the rpath should be absolute, see + // PR #41352 for details). + // + // FIXME: Remove this logic into librustc_*san once Cargo supports it + let rpath = cratepath.parent().unwrap(); + let rpath = rpath.to_str().expect("non-utf8 component in path"); + cmd.args(&["-Wl,-rpath".into(), "-Xlinker".into(), rpath.into()]); + } + + let dst = tmpdir.join(cratepath.file_name().unwrap()); + let mut archive = ::new(sess, &dst, Some(cratepath)); + archive.update_symbols(); + + for f in archive.src_files() { + if f.ends_with(RLIB_BYTECODE_EXTENSION) || f == METADATA_FILENAME { + archive.remove_file(&f); + } + } + + archive.build(); + + cmd.link_whole_rlib(&dst); + } + + // Adds the static "rlib" versions of all crates to the command line. + // There's a bit of magic which happens here specifically related to LTO and + // dynamic libraries. Specifically: + // + // * For LTO, we remove upstream object files. + // * For dylibs we remove metadata and bytecode from upstream rlibs + // + // When performing LTO, almost(*) all of the bytecode from the upstream + // libraries has already been included in our object file output. 
As a + // result we need to remove the object files in the upstream libraries so + // the linker doesn't try to include them twice (or whine about duplicate + // symbols). We must continue to include the rest of the rlib, however, as + // it may contain static native libraries which must be linked in. + // + // (*) Crates marked with `#![no_builtins]` don't participate in LTO and + // their bytecode wasn't included. The object files in those libraries must + // still be passed to the linker. + // + // When making a dynamic library, linkers by default don't include any + // object files in an archive if they're not necessary to resolve the link. + // We basically want to convert the archive (rlib) to a dylib, though, so we + // *do* want everything included in the output, regardless of whether the + // linker thinks it's needed or not. As a result we must use the + // --whole-archive option (or the platform equivalent). When using this + // option the linker will fail if there are non-objects in the archive (such + // as our own metadata and/or bytecode). All in all, for rlibs to be + // entirely included in dylibs, we need to remove all non-object files. + // + // Note, however, that if we're not doing LTO or we're not producing a dylib + // (aka we're making an executable), we can just pass the rlib blindly to + // the linker (fast) because it's fine if it's not actually included as + // we're at the end of the dependency chain. + fn add_static_crate<'a, B: ArchiveBuilder<'a>>(cmd: &mut dyn Linker, + sess: &'a Session, + codegen_results: &CodegenResults, + tmpdir: &Path, + crate_type: config::CrateType, + cnum: CrateNum) { + let src = &codegen_results.crate_info.used_crate_source[&cnum]; + let cratepath = &src.rlib.as_ref().unwrap().0; + + // See the comment above in `link_staticlib` and `link_rlib` for why if + // there's a static library that's not relevant we skip all object + // files. + let native_libs = &codegen_results.crate_info.native_libraries[&cnum]; + let skip_native = native_libs.iter().any(|lib| { + lib.kind == NativeLibraryKind::NativeStatic && !relevant_lib(sess, lib) + }); + + if (!are_upstream_rust_objects_already_included(sess) || + ignored_for_lto(sess, &codegen_results.crate_info, cnum)) && + crate_type != config::CrateType::Dylib && + !skip_native { + cmd.link_rlib(&fix_windows_verbatim_for_gcc(cratepath)); + return + } + + let dst = tmpdir.join(cratepath.file_name().unwrap()); + let name = cratepath.file_name().unwrap().to_str().unwrap(); + let name = &name[3..name.len() - 5]; // chop off lib/.rlib + + time_ext(sess.time_extended(), Some(sess), &format!("altering {}.rlib", name), || { + let mut archive = ::new(sess, &dst, Some(cratepath)); + archive.update_symbols(); + + let mut any_objects = false; + for f in archive.src_files() { + if f.ends_with(RLIB_BYTECODE_EXTENSION) || f == METADATA_FILENAME { + archive.remove_file(&f); + continue + } + + let canonical = f.replace("-", "_"); + let canonical_name = name.replace("-", "_"); + + // Look for `.rcgu.o` at the end of the filename to conclude + // that this is a Rust-related object file. 
+ fn looks_like_rust(s: &str) -> bool { + let path = Path::new(s); + let ext = path.extension().and_then(|s| s.to_str()); + if ext != Some(OutputType::Object.extension()) { + return false + } + let ext2 = path.file_stem() + .and_then(|s| Path::new(s).extension()) + .and_then(|s| s.to_str()); + ext2 == Some(RUST_CGU_EXT) + } + + let is_rust_object = + canonical.starts_with(&canonical_name) && + looks_like_rust(&f); + + // If we've been requested to skip all native object files + // (those not generated by the rust compiler) then we can skip + // this file. See above for why we may want to do this. + let skip_because_cfg_say_so = skip_native && !is_rust_object; + + // If we're performing LTO and this is a rust-generated object + // file, then we don't need the object file as it's part of the + // LTO module. Note that `#![no_builtins]` is excluded from LTO, + // though, so we let that object file slide. + let skip_because_lto = are_upstream_rust_objects_already_included(sess) && + is_rust_object && + (sess.target.target.options.no_builtins || + !codegen_results.crate_info.is_no_builtins.contains(&cnum)); + + if skip_because_cfg_say_so || skip_because_lto { + archive.remove_file(&f); + } else { + any_objects = true; + } + } + + if !any_objects { + return + } + archive.build(); + + // If we're creating a dylib, then we need to include the + // whole of each object in our archive into that artifact. This is + // because a `dylib` can be reused as an intermediate artifact. + // + // Note, though, that we don't want to include the whole of a + // compiler-builtins crate (e.g., compiler-rt) because it'll get + // repeatedly linked anyway. + if crate_type == config::CrateType::Dylib && + codegen_results.crate_info.compiler_builtins != Some(cnum) { + cmd.link_whole_rlib(&fix_windows_verbatim_for_gcc(&dst)); + } else { + cmd.link_rlib(&fix_windows_verbatim_for_gcc(&dst)); + } + }); + } + + // Same thing as above, but for dynamic crates instead of static crates. + fn add_dynamic_crate(cmd: &mut dyn Linker, sess: &Session, cratepath: &Path) { + // Just need to tell the linker about where the library lives and + // what its name is + let parent = cratepath.parent(); + if let Some(dir) = parent { + cmd.include_path(&fix_windows_verbatim_for_gcc(dir)); + } + let filestem = cratepath.file_stem().unwrap().to_str().unwrap(); + cmd.link_rust_dylib(&unlib(&sess.target, filestem), + parent.unwrap_or(Path::new(""))); + } +} + +// Link in all of our upstream crates' native dependencies. Remember that +// all of these upstream native dependencies are all non-static +// dependencies. We've got two cases then: +// +// 1. The upstream crate is an rlib. In this case we *must* link in the +// native dependency because the rlib is just an archive. +// +// 2. The upstream crate is a dylib. In order to use the dylib, we have to +// have the dependency present on the system somewhere. Thus, we don't +// gain a whole lot from not linking in the dynamic dependency to this +// crate as well. +// +// The use case for this is a little subtle. In theory the native +// dependencies of a crate are purely an implementation detail of the crate +// itself, but the problem arises with generic and inlined functions. If a +// generic function calls a native function, then the generic function must +// be instantiated in the target crate, meaning that the native symbol must +// also be resolved in the target crate. 
+pub fn add_upstream_native_libraries(cmd: &mut dyn Linker, + sess: &Session, + codegen_results: &CodegenResults, + crate_type: config::CrateType) { + // Be sure to use a topological sorting of crates because there may be + // interdependencies between native libraries. When passing -nodefaultlibs, + // for example, almost all native libraries depend on libc, so we have to + // make sure that's all the way at the right (liblibc is near the base of + // the dependency chain). + // + // This passes RequireStatic, but the actual requirement doesn't matter, + // we're just getting an ordering of crate numbers, we're not worried about + // the paths. + let formats = sess.dependency_formats.borrow(); + let data = formats.get(&crate_type).unwrap(); + + let crates = &codegen_results.crate_info.used_crates_static; + for &(cnum, _) in crates { + for lib in codegen_results.crate_info.native_libraries[&cnum].iter() { + let name = match lib.name { + Some(ref l) => l, + None => continue, + }; + if !relevant_lib(sess, &lib) { + continue + } + match lib.kind { + NativeLibraryKind::NativeUnknown => cmd.link_dylib(&name.as_str()), + NativeLibraryKind::NativeFramework => cmd.link_framework(&name.as_str()), + NativeLibraryKind::NativeStaticNobundle => { + // Link "static-nobundle" native libs only if the crate they originate from + // is being linked statically to the current crate. If it's linked dynamically + // or is an rlib already included via some other dylib crate, the symbols from + // native libs will have already been included in that dylib. + if data[cnum.as_usize() - 1] == Linkage::Static { + cmd.link_staticlib(&name.as_str()) + } + }, + // ignore statically included native libraries here as we've + // already included them when we included the rust library + // previously + NativeLibraryKind::NativeStatic => {} + } + } + } +} + +pub fn relevant_lib(sess: &Session, lib: &NativeLibrary) -> bool { + match lib.cfg { + Some(ref cfg) => syntax::attr::cfg_matches(cfg, &sess.parse_sess, None), + None => true, + } +} + +pub fn are_upstream_rust_objects_already_included(sess: &Session) -> bool { + match sess.lto() { + config::Lto::Fat => true, + config::Lto::Thin => { + // If we defer LTO to the linker, we haven't run LTO ourselves, so + // any upstream object files have not been copied yet. 
+ !sess.opts.cg.linker_plugin_lto.enabled() + } + config::Lto::No | + config::Lto::ThinLocal => false, + } +} + +fn is_pic(sess: &Session) -> bool { + let reloc_model_arg = match sess.opts.cg.relocation_model { + Some(ref s) => &s[..], + None => &sess.target.target.options.relocation_model[..], + }; + + reloc_model_arg == "pic" +} diff --git a/src/librustc_codegen_ssa/back/mod.rs b/src/librustc_codegen_ssa/back/mod.rs index 888108408fbe3..a16d099ee3e4d 100644 --- a/src/librustc_codegen_ssa/back/mod.rs +++ b/src/librustc_codegen_ssa/back/mod.rs @@ -5,3 +5,5 @@ pub mod link; pub mod command; pub mod symbol_export; pub mod archive; +pub mod rpath; +pub mod wasm; diff --git a/src/librustc_codegen_llvm/back/rpath.rs b/src/librustc_codegen_ssa/back/rpath.rs similarity index 100% rename from src/librustc_codegen_llvm/back/rpath.rs rename to src/librustc_codegen_ssa/back/rpath.rs diff --git a/src/librustc_codegen_llvm/back/wasm.rs b/src/librustc_codegen_ssa/back/wasm.rs similarity index 100% rename from src/librustc_codegen_llvm/back/wasm.rs rename to src/librustc_codegen_ssa/back/wasm.rs diff --git a/src/librustc_codegen_ssa/debuginfo.rs b/src/librustc_codegen_ssa/debuginfo/mod.rs similarity index 99% rename from src/librustc_codegen_ssa/debuginfo.rs rename to src/librustc_codegen_ssa/debuginfo/mod.rs index aa7cdbed99446..d60a2e0cb1358 100644 --- a/src/librustc_codegen_ssa/debuginfo.rs +++ b/src/librustc_codegen_ssa/debuginfo/mod.rs @@ -1,6 +1,8 @@ use syntax_pos::{BytePos, Span}; use rustc::hir::def_id::CrateNum; +pub mod type_names; + pub enum FunctionDebugContext { RegularContext(FunctionDebugContextData), DebugInfoDisabled, diff --git a/src/librustc_codegen_llvm/debuginfo/type_names.rs b/src/librustc_codegen_ssa/debuginfo/type_names.rs similarity index 77% rename from src/librustc_codegen_llvm/debuginfo/type_names.rs rename to src/librustc_codegen_ssa/debuginfo/type_names.rs index eff7cd1bc8a48..fee6c5b04d0d1 100644 --- a/src/librustc_codegen_llvm/debuginfo/type_names.rs +++ b/src/librustc_codegen_ssa/debuginfo/type_names.rs @@ -1,31 +1,26 @@ // Type Names for Debug Info. -use crate::common::CodegenCx; -use rustc::hir::def_id::DefId; -use rustc::ty::subst::SubstsRef; -use rustc::ty::{self, Ty}; -use rustc_codegen_ssa::traits::*; +use rustc::hir::{self, def_id::DefId}; +use rustc::ty::{self, Ty, TyCtxt, subst::SubstsRef}; use rustc_data_structures::fx::FxHashSet; -use rustc::hir; - // Compute the name of the type as it should be stored in debuginfo. Does not do // any caching, i.e., calling the function twice with the same type will also do // the work twice. The `qualified` parameter only affects the first level of the // type name, further levels (i.e., type parameters) are always fully qualified. -pub fn compute_debuginfo_type_name<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, +pub fn compute_debuginfo_type_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, t: Ty<'tcx>, qualified: bool) -> String { let mut result = String::with_capacity(64); let mut visited = FxHashSet::default(); - push_debuginfo_type_name(cx, t, qualified, &mut result, &mut visited); + push_debuginfo_type_name(tcx, t, qualified, &mut result, &mut visited); result } // Pushes the name of the type as it should be stored in debuginfo on the // `output` String. See also compute_debuginfo_type_name(). 
-pub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, +pub fn push_debuginfo_type_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, t: Ty<'tcx>, qualified: bool, output: &mut String, @@ -33,7 +28,7 @@ pub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, // When targeting MSVC, emit C++ style type names for compatibility with // .natvis visualizers (and perhaps other existing native debuggers?) - let cpp_like_names = cx.sess().target.target.options.is_like_msvc; + let cpp_like_names = tcx.sess.target.target.options.is_like_msvc; match t.sty { ty::Bool => output.push_str("bool"), @@ -43,15 +38,15 @@ pub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, ty::Int(int_ty) => output.push_str(int_ty.ty_to_string()), ty::Uint(uint_ty) => output.push_str(uint_ty.ty_to_string()), ty::Float(float_ty) => output.push_str(float_ty.ty_to_string()), - ty::Foreign(def_id) => push_item_name(cx, def_id, qualified, output), + ty::Foreign(def_id) => push_item_name(tcx, def_id, qualified, output), ty::Adt(def, substs) => { - push_item_name(cx, def.did, qualified, output); - push_type_params(cx, substs, output, visited); + push_item_name(tcx, def.did, qualified, output); + push_type_params(tcx, substs, output, visited); }, ty::Tuple(component_types) => { output.push('('); for &component_type in component_types { - push_debuginfo_type_name(cx, component_type, true, output, visited); + push_debuginfo_type_name(tcx, component_type, true, output, visited); output.push_str(", "); } if !component_types.is_empty() { @@ -69,7 +64,7 @@ pub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, hir::MutMutable => output.push_str("mut "), } - push_debuginfo_type_name(cx, inner_type, true, output, visited); + push_debuginfo_type_name(tcx, inner_type, true, output, visited); if cpp_like_names { output.push('*'); @@ -83,7 +78,7 @@ pub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, output.push_str("mut "); } - push_debuginfo_type_name(cx, inner_type, true, output, visited); + push_debuginfo_type_name(tcx, inner_type, true, output, visited); if cpp_like_names { output.push('*'); @@ -91,8 +86,8 @@ pub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, }, ty::Array(inner_type, len) => { output.push('['); - push_debuginfo_type_name(cx, inner_type, true, output, visited); - output.push_str(&format!("; {}", len.unwrap_usize(cx.tcx))); + push_debuginfo_type_name(tcx, inner_type, true, output, visited); + output.push_str(&format!("; {}", len.unwrap_usize(tcx))); output.push(']'); }, ty::Slice(inner_type) => { @@ -102,7 +97,7 @@ pub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, output.push('['); } - push_debuginfo_type_name(cx, inner_type, true, output, visited); + push_debuginfo_type_name(tcx, inner_type, true, output, visited); if cpp_like_names { output.push('>'); @@ -112,12 +107,12 @@ pub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, }, ty::Dynamic(ref trait_data, ..) 
=> { if let Some(principal) = trait_data.principal() { - let principal = cx.tcx.normalize_erasing_late_bound_regions( + let principal = tcx.normalize_erasing_late_bound_regions( ty::ParamEnv::reveal_all(), &principal, ); - push_item_name(cx, principal.def_id, false, output); - push_type_params(cx, principal.substs, output, visited); + push_item_name(tcx, principal.def_id, false, output); + push_type_params(tcx, principal.substs, output, visited); } else { output.push_str("dyn '_"); } @@ -142,13 +137,13 @@ pub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, } - let sig = t.fn_sig(cx.tcx); + let sig = t.fn_sig(tcx); if sig.unsafety() == hir::Unsafety::Unsafe { output.push_str("unsafe "); } let abi = sig.abi(); - if abi != crate::abi::Abi::Rust { + if abi != rustc_target::spec::abi::Abi::Rust { output.push_str("extern \""); output.push_str(abi.name()); output.push_str("\" "); @@ -156,10 +151,10 @@ pub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, output.push_str("fn("); - let sig = cx.tcx.normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), &sig); + let sig = tcx.normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), &sig); if !sig.inputs().is_empty() { for ¶meter_type in sig.inputs() { - push_debuginfo_type_name(cx, parameter_type, true, output, visited); + push_debuginfo_type_name(tcx, parameter_type, true, output, visited); output.push_str(", "); } output.pop(); @@ -178,7 +173,7 @@ pub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, if !sig.output().is_unit() { output.push_str(" -> "); - push_debuginfo_type_name(cx, sig.output(), true, output, visited); + push_debuginfo_type_name(tcx, sig.output(), true, output, visited); } @@ -213,18 +208,18 @@ pub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, } } - fn push_item_name(cx: &CodegenCx<'_, '_>, + fn push_item_name(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId, qualified: bool, output: &mut String) { if qualified { - output.push_str(&cx.tcx.crate_name(def_id.krate).as_str()); - for path_element in cx.tcx.def_path(def_id).data { + output.push_str(&tcx.crate_name(def_id.krate).as_str()); + for path_element in tcx.def_path(def_id).data { output.push_str("::"); output.push_str(&path_element.data.as_interned_str().as_str()); } } else { - output.push_str(&cx.tcx.item_name(def_id).as_str()); + output.push_str(&tcx.item_name(def_id).as_str()); } } @@ -233,7 +228,7 @@ pub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, // reconstructed for items from non-local crates. For local crates, this // would be possible but with inlining and LTO we have to use the least // common denominator - otherwise we would run into conflicts. 
- fn push_type_params<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, + fn push_type_params<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, substs: SubstsRef<'tcx>, output: &mut String, visited: &mut FxHashSet>) { @@ -244,7 +239,7 @@ pub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, output.push('<'); for type_parameter in substs.types() { - push_debuginfo_type_name(cx, type_parameter, true, output, visited); + push_debuginfo_type_name(tcx, type_parameter, true, output, visited); output.push_str(", "); } diff --git a/src/librustc_codegen_ssa/lib.rs b/src/librustc_codegen_ssa/lib.rs index e2917578c0ece..59885090ecf0a 100644 --- a/src/librustc_codegen_ssa/lib.rs +++ b/src/librustc_codegen_ssa/lib.rs @@ -66,6 +66,7 @@ pub struct ModuleCodegen { pub kind: ModuleKind, } +pub const METADATA_FILENAME: &str = "rust.metadata.bin"; pub const RLIB_BYTECODE_EXTENSION: &str = "bc.z"; impl ModuleCodegen { diff --git a/src/librustc_codegen_ssa/mir/place.rs b/src/librustc_codegen_ssa/mir/place.rs index 2b7b900475332..1134707f96c92 100644 --- a/src/librustc_codegen_ssa/mir/place.rs +++ b/src/librustc_codegen_ssa/mir/place.rs @@ -216,19 +216,19 @@ impl<'a, 'tcx: 'a, V: CodegenObject> PlaceRef<'tcx, V> { if self.layout.abi.is_uninhabited() { return bx.cx().const_undef(cast_to); } - let (discr_scalar, discr_kind) = match self.layout.variants { + let (discr_scalar, discr_kind, discr_index) = match self.layout.variants { layout::Variants::Single { index } => { let discr_val = self.layout.ty.ty_adt_def().map_or( index.as_u32() as u128, |def| def.discriminant_for_variant(bx.cx().tcx(), index).val); return bx.cx().const_uint_big(cast_to, discr_val); } - layout::Variants::Multiple { ref discr, ref discr_kind, .. } => { - (discr, discr_kind) + layout::Variants::Multiple { ref discr, ref discr_kind, discr_index, .. } => { + (discr, discr_kind, discr_index) } }; - let discr = self.project_field(bx, 0); + let discr = self.project_field(bx, discr_index); let lldiscr = bx.load_operand(discr).immediate(); match *discr_kind { layout::DiscriminantKind::Tag => { @@ -292,9 +292,10 @@ impl<'a, 'tcx: 'a, V: CodegenObject> PlaceRef<'tcx, V> { } layout::Variants::Multiple { discr_kind: layout::DiscriminantKind::Tag, + discr_index, .. } => { - let ptr = self.project_field(bx, 0); + let ptr = self.project_field(bx, discr_index); let to = self.layout.ty.ty_adt_def().unwrap() .discriminant_for_variant(bx.tcx(), variant_index) .val; @@ -309,6 +310,7 @@ impl<'a, 'tcx: 'a, V: CodegenObject> PlaceRef<'tcx, V> { ref niche_variants, niche_start, }, + discr_index, .. 
} => { if variant_index != dataful_variant { @@ -321,7 +323,7 @@ impl<'a, 'tcx: 'a, V: CodegenObject> PlaceRef<'tcx, V> { bx.memset(self.llval, fill_byte, size, self.align, MemFlags::empty()); } - let niche = self.project_field(bx, 0); + let niche = self.project_field(bx, discr_index); let niche_llty = bx.cx().immediate_backend_type(niche.layout); let niche_value = variant_index.as_u32() - niche_variants.start().as_u32(); let niche_value = (niche_value as u128) diff --git a/src/librustc_data_structures/graph/dominators/mod.rs b/src/librustc_data_structures/graph/dominators/mod.rs index aaed41d9fa362..93a2a261c6fde 100644 --- a/src/librustc_data_structures/graph/dominators/mod.rs +++ b/src/librustc_data_structures/graph/dominators/mod.rs @@ -8,8 +8,6 @@ use super::super::indexed_vec::{Idx, IndexVec}; use super::iterate::reverse_post_order; use super::ControlFlowGraph; -use std::fmt; - #[cfg(test)] mod test; @@ -158,48 +156,3 @@ impl<'dom, Node: Idx> Iterator for Iter<'dom, Node> { } } } - -pub struct DominatorTree { - root: N, - children: IndexVec>, -} - -impl DominatorTree { - pub fn children(&self, node: Node) -> &[Node] { - &self.children[node] - } -} - -impl fmt::Debug for DominatorTree { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Debug::fmt( - &DominatorTreeNode { - tree: self, - node: self.root, - }, - fmt, - ) - } -} - -struct DominatorTreeNode<'tree, Node: Idx> { - tree: &'tree DominatorTree, - node: Node, -} - -impl<'tree, Node: Idx> fmt::Debug for DominatorTreeNode<'tree, Node> { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - let subtrees: Vec<_> = self.tree - .children(self.node) - .iter() - .map(|&child| DominatorTreeNode { - tree: self.tree, - node: child, - }) - .collect(); - fmt.debug_tuple("") - .field(&self.node) - .field(&subtrees) - .finish() - } -} diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index 2781bfa3ec849..66c7c9d0eae7e 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -744,7 +744,7 @@ fn usage(verbose: bool, include_unstable_options: bool) { } let message = "Usage: rustc [OPTIONS] INPUT"; let nightly_help = if nightly_options::is_nightly_build() { - "\n -Z help Print internal options for debugging rustc" + "\n -Z help Print unstable compiler options" } else { "" }; @@ -892,7 +892,7 @@ Available lint options: } fn describe_debug_flags() { - println!("\nAvailable debug options:\n"); + println!("\nAvailable options:\n"); print_flag_list("-Z", config::DB_OPTIONS); } diff --git a/src/librustc_incremental/persist/dirty_clean.rs b/src/librustc_incremental/persist/dirty_clean.rs index 5c4fd9dba7af4..6f5b411946c2d 100644 --- a/src/librustc_incremental/persist/dirty_clean.rs +++ b/src/librustc_incremental/persist/dirty_clean.rs @@ -599,7 +599,7 @@ impl<'a, 'tcx> FindAllAttrs<'a, 'tcx> { fn is_active_attr(&mut self, attr: &Attribute) -> bool { for attr_name in &self.attr_names { - if attr.check_name(attr_name) && check_config(self.tcx, attr) { + if attr.check_name(*attr_name) && check_config(self.tcx, attr) { return true; } } diff --git a/src/librustc_lint/types.rs b/src/librustc_lint/types.rs index 7423ce2e76010..d3223c6edb809 100644 --- a/src/librustc_lint/types.rs +++ b/src/librustc_lint/types.rs @@ -820,6 +820,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for VariantSizeDifferences { discr_kind: layout::DiscriminantKind::Tag, ref discr, ref variants, + .. 
} => (variants, discr), _ => return, }; diff --git a/src/librustc_lint/unused.rs b/src/librustc_lint/unused.rs index d41d97f58bcbe..2aee21abb58e5 100644 --- a/src/librustc_lint/unused.rs +++ b/src/librustc_lint/unused.rs @@ -228,7 +228,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedAttributes { let plugin_attributes = cx.sess().plugin_attributes.borrow_mut(); for &(ref name, ty) in plugin_attributes.iter() { - if ty == AttributeType::Whitelisted && attr.check_name(&name) { + if ty == AttributeType::Whitelisted && attr.check_name(&**name) { debug!("{:?} (plugin attr) is whitelisted with ty {:?}", name, ty); break; } diff --git a/src/librustc_macros/src/lib.rs b/src/librustc_macros/src/lib.rs index e99ceb1b0c79b..98fba55218f9b 100644 --- a/src/librustc_macros/src/lib.rs +++ b/src/librustc_macros/src/lib.rs @@ -9,10 +9,16 @@ use proc_macro::TokenStream; mod hash_stable; mod query; +mod symbols; #[proc_macro] pub fn rustc_queries(input: TokenStream) -> TokenStream { query::rustc_queries(input) } +#[proc_macro] +pub fn symbols(input: TokenStream) -> TokenStream { + symbols::symbols(input) +} + decl_derive!([HashStable, attributes(stable_hasher)] => hash_stable::hash_stable_derive); diff --git a/src/librustc_macros/src/symbols.rs b/src/librustc_macros/src/symbols.rs new file mode 100644 index 0000000000000..169c5e1371833 --- /dev/null +++ b/src/librustc_macros/src/symbols.rs @@ -0,0 +1,163 @@ +use proc_macro::TokenStream; +use syn::{ + Token, Ident, LitStr, + braced, parse_macro_input, +}; +use syn::parse::{Result, Parse, ParseStream}; +use syn; +use std::collections::HashSet; +use quote::quote; + +#[allow(non_camel_case_types)] +mod kw { + syn::custom_keyword!(Keywords); + syn::custom_keyword!(Other); +} + +struct Keyword { + name: Ident, + value: LitStr, +} + +impl Parse for Keyword { + fn parse(input: ParseStream<'_>) -> Result { + let name = input.parse()?; + input.parse::()?; + let value = input.parse()?; + input.parse::()?; + + Ok(Keyword { + name, + value, + }) + } +} + +struct Symbol(Ident); + +impl Parse for Symbol { + fn parse(input: ParseStream<'_>) -> Result { + let ident: Ident = input.parse()?; + input.parse::()?; + + Ok(Symbol(ident)) + } +} + +/// A type used to greedily parse another type until the input is empty. +struct List(Vec); + +impl Parse for List { + fn parse(input: ParseStream<'_>) -> Result { + let mut list = Vec::new(); + while !input.is_empty() { + list.push(input.parse()?); + } + Ok(List(list)) + } +} + +struct Input { + keywords: List, + symbols: List, +} + +impl Parse for Input { + fn parse(input: ParseStream<'_>) -> Result { + input.parse::()?; + let content; + braced!(content in input); + let keywords = content.parse()?; + + input.parse::()?; + let content; + braced!(content in input); + let symbols = content.parse()?; + + Ok(Input { + keywords, + symbols, + }) + } +} + +pub fn symbols(input: TokenStream) -> TokenStream { + let input = parse_macro_input!(input as Input); + + let mut keyword_stream = quote! {}; + let mut symbols_stream = quote! {}; + let mut prefill_stream = quote! {}; + let mut from_str_stream = quote! {}; + let mut counter = 0u32; + let mut keys = HashSet::::new(); + + let mut check_dup = |str: &str| { + if !keys.insert(str.to_string()) { + panic!("Symbol `{}` is duplicated", str); + } + }; + + for keyword in &input.keywords.0 { + let name = &keyword.name; + let value = &keyword.value; + check_dup(&value.value()); + prefill_stream.extend(quote! { + #value, + }); + keyword_stream.extend(quote! 
{ + pub const #name: Keyword = Keyword { + ident: Ident::with_empty_ctxt(super::Symbol::new(#counter)) + }; + }); + from_str_stream.extend(quote! { + #value => Ok(#name), + }); + counter += 1; + } + + for symbol in &input.symbols.0 { + let value = &symbol.0; + let value_str = value.to_string(); + check_dup(&value_str); + prefill_stream.extend(quote! { + #value_str, + }); + symbols_stream.extend(quote! { + pub const #value: Symbol = Symbol::new(#counter); + }); + counter += 1; + } + + TokenStream::from(quote! { + macro_rules! keywords { + () => { + #keyword_stream + + impl std::str::FromStr for Keyword { + type Err = (); + + fn from_str(s: &str) -> Result { + match s { + #from_str_stream + _ => Err(()), + } + } + } + } + } + + macro_rules! symbols { + () => { + #symbols_stream + } + } + + impl Interner { + pub fn fresh() -> Self { + Interner::prefill(&[ + #prefill_stream + ]) + } + } + }) +} diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index 36d9bf9f50dd5..2405b9cd2f2d6 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -131,9 +131,9 @@ impl<'a> CrateLoader<'a> { // `source` stores paths which are normalized which may be different // from the strings on the command line. let source = &self.cstore.get_crate_data(cnum).source; - if let Some(locs) = self.sess.opts.externs.get(&*name.as_str()) { + if let Some(entry) = self.sess.opts.externs.get(&*name.as_str()) { // Only use `--extern crate_name=path` here, not `--extern crate_name`. - let found = locs.iter().filter_map(|l| l.as_ref()).any(|l| { + let found = entry.locations.iter().filter_map(|l| l.as_ref()).any(|l| { let l = fs::canonicalize(l).ok(); source.dylib.as_ref().map(|p| &p.0) == l.as_ref() || source.rlib.as_ref().map(|p| &p.0) == l.as_ref() @@ -195,12 +195,20 @@ impl<'a> CrateLoader<'a> { ident: Symbol, span: Span, lib: Library, - dep_kind: DepKind + dep_kind: DepKind, + name: Symbol ) -> (CrateNum, Lrc) { let crate_root = lib.metadata.get_root(); - info!("register crate `extern crate {} as {}`", crate_root.name, ident); self.verify_no_symbol_conflicts(span, &crate_root); + let private_dep = self.sess.opts.externs.get(&name.as_str()) + .map(|e| e.is_private_dep) + .unwrap_or(false); + + info!("register crate `extern crate {} as {}` (private_dep = {})", + crate_root.name, ident, private_dep); + + // Claim this crate number and cache it let cnum = self.cstore.alloc_new_crate_num(); @@ -272,7 +280,8 @@ impl<'a> CrateLoader<'a> { dylib, rlib, rmeta, - } + }, + private_dep }; let cmeta = Lrc::new(cmeta); @@ -390,7 +399,7 @@ impl<'a> CrateLoader<'a> { Ok((cnum, data)) } (LoadResult::Loaded(library), host_library) => { - Ok(self.register_crate(host_library, root, ident, span, library, dep_kind)) + Ok(self.register_crate(host_library, root, ident, span, library, dep_kind, name)) } _ => panic!() } diff --git a/src/librustc_metadata/cstore.rs b/src/librustc_metadata/cstore.rs index d646879b4d45d..22a13f37722b8 100644 --- a/src/librustc_metadata/cstore.rs +++ b/src/librustc_metadata/cstore.rs @@ -79,6 +79,10 @@ pub struct CrateMetadata { pub source: CrateSource, pub proc_macros: Option)>>, + + /// Whether or not this crate should be consider a private dependency + /// for purposes of the 'exported_private_dependencies' lint + pub private_dep: bool } pub struct CStore { @@ -114,7 +118,8 @@ impl CStore { } pub(super) fn get_crate_data(&self, cnum: CrateNum) -> Lrc { - self.metas.borrow()[cnum].clone().unwrap() + self.metas.borrow()[cnum].clone() + .unwrap_or_else(|| 
panic!("Failed to get crate data for {:?}", cnum)) } pub(super) fn set_crate_data(&self, cnum: CrateNum, data: Lrc) { diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs index 995532a00cd6e..95fe9a70f750a 100644 --- a/src/librustc_metadata/cstore_impl.rs +++ b/src/librustc_metadata/cstore_impl.rs @@ -494,6 +494,10 @@ impl CrateStore for cstore::CStore { self.get_crate_data(cnum).name } + fn crate_is_private_dep_untracked(&self, cnum: CrateNum) -> bool { + self.get_crate_data(cnum).private_dep + } + fn crate_disambiguator_untracked(&self, cnum: CrateNum) -> CrateDisambiguator { self.get_crate_data(cnum).root.disambiguator diff --git a/src/librustc_metadata/locator.rs b/src/librustc_metadata/locator.rs index 81878c4f687b6..116042c53fb9e 100644 --- a/src/librustc_metadata/locator.rs +++ b/src/librustc_metadata/locator.rs @@ -442,11 +442,11 @@ impl<'a> Context<'a> { // must be loaded via -L plus some filtering. if self.hash.is_none() { self.should_match_name = false; - if let Some(s) = self.sess.opts.externs.get(&self.crate_name.as_str()) { + if let Some(entry) = self.sess.opts.externs.get(&self.crate_name.as_str()) { // Only use `--extern crate_name=path` here, not `--extern crate_name`. - if s.iter().any(|l| l.is_some()) { + if entry.locations.iter().any(|l| l.is_some()) { return self.find_commandline_library( - s.iter().filter_map(|l| l.as_ref()), + entry.locations.iter().filter_map(|l| l.as_ref()), ); } } diff --git a/src/librustc_mir/borrow_check/mod.rs b/src/librustc_mir/borrow_check/mod.rs index 275d958a3ed31..0b2c90b916045 100644 --- a/src/librustc_mir/borrow_check/mod.rs +++ b/src/librustc_mir/borrow_check/mod.rs @@ -30,7 +30,7 @@ use std::rc::Rc; use syntax_pos::{Span, DUMMY_SP}; use crate::dataflow::indexes::{BorrowIndex, InitIndex, MoveOutIndex, MovePathIndex}; -use crate::dataflow::move_paths::{HasMoveData, LookupResult, MoveData, MoveError}; +use crate::dataflow::move_paths::{HasMoveData, InitLocation, LookupResult, MoveData, MoveError}; use crate::dataflow::Borrows; use crate::dataflow::DataflowResultsConsumer; use crate::dataflow::FlowAtLocation; @@ -1277,25 +1277,7 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> { } = self.infcx.tcx.mir_borrowck(def_id); debug!("{:?} used_mut_upvars={:?}", def_id, used_mut_upvars); for field in used_mut_upvars { - // This relies on the current way that by-value - // captures of a closure are copied/moved directly - // when generating MIR. - match operands[field.index()] { - Operand::Move(Place::Base(PlaceBase::Local(local))) - | Operand::Copy(Place::Base(PlaceBase::Local(local))) => { - self.used_mut.insert(local); - } - Operand::Move(ref place @ Place::Projection(_)) - | Operand::Copy(ref place @ Place::Projection(_)) => { - if let Some(field) = place.is_upvar_field_projection( - self.mir, &self.infcx.tcx) { - self.used_mut_upvars.push(field); - } - } - Operand::Move(Place::Base(PlaceBase::Static(..))) - | Operand::Copy(Place::Base(PlaceBase::Static(..))) - | Operand::Constant(..) => {} - } + self.propagate_closure_used_mut_upvar(&operands[field.index()]); } } AggregateKind::Adt(..) @@ -1310,6 +1292,80 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> { } } + fn propagate_closure_used_mut_upvar(&mut self, operand: &Operand<'tcx>) { + let propagate_closure_used_mut_place = |this: &mut Self, place: &Place<'tcx>| { + match *place { + Place::Projection { .. 
} => { + if let Some(field) = place.is_upvar_field_projection( + this.mir, &this.infcx.tcx) { + this.used_mut_upvars.push(field); + } + } + Place::Base(PlaceBase::Local(local)) => { + this.used_mut.insert(local); + } + Place::Base(PlaceBase::Static(_)) => {} + } + }; + + // This relies on the current way that by-value + // captures of a closure are copied/moved directly + // when generating MIR. + match *operand { + Operand::Move(Place::Base(PlaceBase::Local(local))) + | Operand::Copy(Place::Base(PlaceBase::Local(local))) + if self.mir.local_decls[local].is_user_variable.is_none() => + { + if self.mir.local_decls[local].ty.is_mutable_pointer() { + // The variable will be marked as mutable by the borrow. + return; + } + // This is an edge case where we have a `move` closure + // inside a non-move closure, and the inner closure + // contains a mutation: + // + // let mut i = 0; + // || { move || { i += 1; }; }; + // + // In this case our usual strategy of assuming that the + // variable will be captured by mutable reference is + // wrong, since `i` can be copied into the inner + // closure from a shared reference. + // + // As such we have to search for the local that this + // capture comes from and mark it as being used as mut. + + let temp_mpi = self.move_data.rev_lookup.find_local(local); + let init = if let [init_index] = *self.move_data.init_path_map[temp_mpi] { + &self.move_data.inits[init_index] + } else { + bug!("temporary should be initialized exactly once") + }; + + let loc = match init.location { + InitLocation::Statement(stmt) => stmt, + _ => bug!("temporary initialized in arguments"), + }; + + let bbd = &self.mir[loc.block]; + let stmt = &bbd.statements[loc.statement_index]; + debug!("temporary assigned in: stmt={:?}", stmt); + + if let StatementKind::Assign(_, box Rvalue::Ref(_, _, ref source)) = stmt.kind { + propagate_closure_used_mut_place(self, source); + } else { + bug!("closures should only capture user variables \ + or references to user variables"); + } + } + Operand::Move(ref place) + | Operand::Copy(ref place) => { + propagate_closure_used_mut_place(self, place); + } + Operand::Constant(..) => {} + } + } + fn consume_operand( &mut self, context: Context, diff --git a/src/librustc_mir/interpret/operand.rs b/src/librustc_mir/interpret/operand.rs index 7ea56e3647437..a3513cb1350c6 100644 --- a/src/librustc_mir/interpret/operand.rs +++ b/src/librustc_mir/interpret/operand.rs @@ -610,18 +610,19 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> ) -> EvalResult<'tcx, (u128, VariantIdx)> { trace!("read_discriminant_value {:#?}", rval.layout); - let discr_kind = match rval.layout.variants { + let (discr_kind, discr_index) = match rval.layout.variants { layout::Variants::Single { index } => { let discr_val = rval.layout.ty.ty_adt_def().map_or( index.as_u32() as u128, |def| def.discriminant_for_variant(*self.tcx, index).val); return Ok((discr_val, index)); } - layout::Variants::Multiple { ref discr_kind, .. } => discr_kind, + layout::Variants::Multiple { ref discr_kind, discr_index, .. 
} => + (discr_kind, discr_index), }; // read raw discriminant value - let discr_op = self.operand_field(rval, 0)?; + let discr_op = self.operand_field(rval, discr_index as u64)?; let discr_val = self.read_immediate(discr_op)?; let raw_discr = discr_val.to_scalar_or_undef(); trace!("discr value: {:?}", raw_discr); diff --git a/src/librustc_mir/interpret/place.rs b/src/librustc_mir/interpret/place.rs index 4f7b59a5a9a95..461e57b3e4849 100644 --- a/src/librustc_mir/interpret/place.rs +++ b/src/librustc_mir/interpret/place.rs @@ -961,6 +961,7 @@ where layout::Variants::Multiple { discr_kind: layout::DiscriminantKind::Tag, ref discr, + discr_index, .. } => { let adt_def = dest.layout.ty.ty_adt_def().unwrap(); @@ -975,7 +976,7 @@ where let size = discr.value.size(self); let discr_val = truncate(discr_val, size); - let discr_dest = self.place_field(dest, 0)?; + let discr_dest = self.place_field(dest, discr_index as u64)?; self.write_scalar(Scalar::from_uint(discr_val, size), discr_dest)?; } layout::Variants::Multiple { @@ -984,6 +985,7 @@ where ref niche_variants, niche_start, }, + discr_index, .. } => { assert!( @@ -991,7 +993,7 @@ where ); if variant_index != dataful_variant { let niche_dest = - self.place_field(dest, 0)?; + self.place_field(dest, discr_index as u64)?; let niche_value = variant_index.as_u32() - niche_variants.start().as_u32(); let niche_value = (niche_value as u128) .wrapping_add(niche_start); diff --git a/src/librustc_privacy/lib.rs b/src/librustc_privacy/lib.rs index 9a8970b2935e0..44621e5dc95d1 100644 --- a/src/librustc_privacy/lib.rs +++ b/src/librustc_privacy/lib.rs @@ -1540,7 +1540,6 @@ struct SearchInterfaceForPrivateItemsVisitor<'a, 'tcx: 'a> { has_pub_restricted: bool, has_old_errors: bool, in_assoc_ty: bool, - private_crates: FxHashSet } impl<'a, 'tcx: 'a> SearchInterfaceForPrivateItemsVisitor<'a, 'tcx> { @@ -1622,7 +1621,7 @@ impl<'a, 'tcx: 'a> SearchInterfaceForPrivateItemsVisitor<'a, 'tcx> { /// 2. It comes from a private crate fn leaks_private_dep(&self, item_id: DefId) -> bool { let ret = self.required_visibility == ty::Visibility::Public && - self.private_crates.contains(&item_id.krate); + self.tcx.is_private_dep(item_id.krate); log::debug!("leaks_private_dep(item_id={:?})={}", item_id, ret); return ret; @@ -1640,7 +1639,6 @@ struct PrivateItemsInPublicInterfacesVisitor<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, has_pub_restricted: bool, old_error_set: &'a HirIdSet, - private_crates: FxHashSet } impl<'a, 'tcx> PrivateItemsInPublicInterfacesVisitor<'a, 'tcx> { @@ -1678,7 +1676,6 @@ impl<'a, 'tcx> PrivateItemsInPublicInterfacesVisitor<'a, 'tcx> { has_pub_restricted: self.has_pub_restricted, has_old_errors, in_assoc_ty: false, - private_crates: self.private_crates.clone() } } @@ -1876,17 +1873,11 @@ fn check_private_in_public<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, krate: CrateNum) { pub_restricted_visitor.has_pub_restricted }; - let private_crates: FxHashSet = tcx.sess.opts.extern_private.iter() - .flat_map(|c| { - tcx.crates().iter().find(|&&krate| &tcx.crate_name(krate) == c).cloned() - }).collect(); - // Check for private types and traits in public interfaces. 
let mut visitor = PrivateItemsInPublicInterfacesVisitor { tcx, has_pub_restricted, old_error_set: &visitor.old_error_set, - private_crates }; krate.visit_all_item_likes(&mut DeepVisitor::new(&mut visitor)); } diff --git a/src/librustc_target/abi/mod.rs b/src/librustc_target/abi/mod.rs index 235b530a7ef2f..59eda97a2f9f5 100644 --- a/src/librustc_target/abi/mod.rs +++ b/src/librustc_target/abi/mod.rs @@ -828,12 +828,13 @@ pub enum Variants { index: VariantIdx, }, - /// Enums with more than one inhabited variant: for each case there is - /// a struct, and they all have space reserved for the discriminant, - /// which is the sole field of the enum layout. + /// Enum-likes with more than one inhabited variant: for each case there is + /// a struct, and they all have space reserved for the discriminant. + /// For enums this is the sole field of the layout. Multiple { discr: Scalar, discr_kind: DiscriminantKind, + discr_index: usize, variants: IndexVec, }, } @@ -845,8 +846,9 @@ pub enum DiscriminantKind { /// Niche (values invalid for a type) encoding the discriminant: /// the variant `dataful_variant` contains a niche at an arbitrary - /// offset (field 0 of the enum), which for a variant with discriminant - /// `d` is set to `(d - niche_variants.start).wrapping_add(niche_start)`. + /// offset (field `discr_index` of the enum), which for a variant with + /// discriminant `d` is set to + /// `(d - niche_variants.start).wrapping_add(niche_start)`. /// /// For example, `Option<(usize, &T)>` is represented such that /// `None` has a null pointer for the second tuple field, and diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs index 8805dade40e4c..0c206b27f8058 100644 --- a/src/librustc_typeck/astconv.rs +++ b/src/librustc_typeck/astconv.rs @@ -1362,12 +1362,11 @@ impl<'o, 'gcx: 'tcx, 'tcx> dyn AstConv<'gcx, 'tcx> + 'o { let msg = format!("expected type, found variant `{}`", assoc_ident); tcx.sess.span_err(span, &msg); } else if qself_ty.is_enum() { - // Report as incorrect enum variant rather than ambiguous type. let mut err = tcx.sess.struct_span_err( - span, - &format!("no variant `{}` on enum `{}`", &assoc_ident.as_str(), qself_ty), + assoc_ident.span, + &format!("no variant `{}` in enum `{}`", assoc_ident, qself_ty), ); - // Check if it was a typo. + let adt_def = qself_ty.ty_adt_def().expect("enum is not an ADT"); if let Some(suggested_name) = find_best_match_for_name( adt_def.variants.iter().map(|variant| &variant.ident.name), @@ -1375,14 +1374,20 @@ impl<'o, 'gcx: 'tcx, 'tcx> dyn AstConv<'gcx, 'tcx> + 'o { None, ) { err.span_suggestion( - span, - "did you mean", - format!("{}::{}", qself_ty, suggested_name), + assoc_ident.span, + "there is a variant with a similar name", + suggested_name.to_string(), Applicability::MaybeIncorrect, ); } else { - err.span_label(span, "unknown variant"); + err.span_label(span, format!("variant not found in `{}`", qself_ty)); } + + if let Some(sp) = tcx.hir().span_if_local(adt_def.did) { + let sp = tcx.sess.source_map().def_span(sp); + err.span_label(sp, format!("variant `{}` not found here", assoc_ident)); + } + err.emit(); } else if !qself_ty.references_error() { // Don't print `TyErr` to the user. 
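The astconv.rs hunk above reworks the "no variant" diagnostic for type-relative paths: the error now reads "no variant `X` in enum `Y`", points only at the variant segment, suggests a similarly named variant instead of rewriting the whole path, and labels the enum definition when it is local. A minimal sketch of code that triggers it, modeled on the `suggest-variants.rs` test further down (illustrative only, not part of the patch; this snippet is expected to fail to compile):

enum Shape {
    Square { size: usize },
    Circle { size: usize },
}

fn main() {
    // error: no variant `Squareee` in enum `Shape`
    //   (the span covers only `Squareee`)
    // help: there is a variant with a similar name: `Square`
    // The local `enum Shape` definition is additionally labeled
    // "variant `Squareee` not found here".
    let _ = Shape::Squareee { size: 5 };
}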
diff --git a/src/librustc_typeck/check/method/suggest.rs b/src/librustc_typeck/check/method/suggest.rs index 5f5c7cfec9578..31b7724d63816 100644 --- a/src/librustc_typeck/check/method/suggest.rs +++ b/src/librustc_typeck/check/method/suggest.rs @@ -14,11 +14,11 @@ use rustc::hir::map as hir_map; use rustc::hir::print; use rustc::infer::type_variable::TypeVariableOrigin; use rustc::traits::Obligation; -use rustc::ty::{self, Adt, Ty, TyCtxt, ToPolyTraitRef, ToPredicate, TypeFoldable}; +use rustc::ty::{self, Ty, TyCtxt, ToPolyTraitRef, ToPredicate, TypeFoldable}; use rustc::ty::print::with_crate_prefix; use syntax_pos::{Span, FileName}; use syntax::ast; -use syntax::util::lev_distance::find_best_match_for_name; +use syntax::util::lev_distance; use std::cmp::Ordering; @@ -188,17 +188,10 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let actual = self.resolve_type_vars_if_possible(&rcvr_ty); let ty_str = self.ty_to_string(actual); let is_method = mode == Mode::MethodCall; - let mut suggestion = None; let item_kind = if is_method { "method" } else if actual.is_enum() { - if let Adt(ref adt_def, _) = actual.sty { - let names = adt_def.variants.iter().map(|s| &s.ident.name); - suggestion = find_best_match_for_name(names, - &item_name.as_str(), - None); - } - "variant" + "variant or associated item" } else { match (item_name.as_str().chars().next(), actual.is_fresh_ty()) { (Some(name), false) if name.is_lowercase() => { @@ -299,7 +292,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { return; } else { span = item_name.span; - let mut err = struct_span_err!( + struct_span_err!( tcx.sess, span, E0599, @@ -307,17 +300,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { item_kind, item_name, ty_str - ); - if let Some(suggestion) = suggestion { - // enum variant - err.span_suggestion( - span, - "did you mean", - suggestion.to_string(), - Applicability::MaybeIncorrect, - ); - } - err + ) } } else { tcx.sess.diagnostic().struct_dummy() @@ -469,14 +452,36 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { out_of_scope_traits); } + if actual.is_enum() { + let adt_def = actual.ty_adt_def().expect("enum is not an ADT"); + if let Some(suggestion) = lev_distance::find_best_match_for_name( + adt_def.variants.iter().map(|s| &s.ident.name), + &item_name.as_str(), + None, + ) { + err.span_suggestion( + span, + "there is a variant with a similar name", + suggestion.to_string(), + Applicability::MaybeIncorrect, + ); + } + } + if let Some(lev_candidate) = lev_candidate { + let def = lev_candidate.def(); err.span_suggestion( span, - "did you mean", + &format!( + "there is {} {} with a similar name", + def.article(), + def.kind_name(), + ), lev_candidate.ident.to_string(), Applicability::MaybeIncorrect, ); } + err.emit(); } diff --git a/src/librustdoc/config.rs b/src/librustdoc/config.rs index f2682e00430d0..a4d2a3be86330 100644 --- a/src/librustdoc/config.rs +++ b/src/librustdoc/config.rs @@ -1,4 +1,4 @@ -use std::collections::{BTreeMap, BTreeSet}; +use std::collections::BTreeMap; use std::fmt; use std::path::PathBuf; @@ -9,7 +9,7 @@ use rustc::lint::Level; use rustc::session::early_error; use rustc::session::config::{CodegenOptions, DebuggingOptions, ErrorOutputType, Externs}; use rustc::session::config::{nightly_options, build_codegen_options, build_debugging_options, - get_cmd_lint_options}; + get_cmd_lint_options, ExternEntry}; use rustc::session::search_paths::SearchPath; use rustc_driver; use rustc_target::spec::TargetTriple; @@ -578,7 +578,7 @@ fn parse_extern_html_roots( /// error message. 
// FIXME(eddyb) This shouldn't be duplicated with `rustc::session`. fn parse_externs(matches: &getopts::Matches) -> Result { - let mut externs: BTreeMap<_, BTreeSet<_>> = BTreeMap::new(); + let mut externs: BTreeMap<_, ExternEntry> = BTreeMap::new(); for arg in &matches.opt_strs("extern") { let mut parts = arg.splitn(2, '='); let name = parts.next().ok_or("--extern value must not be empty".to_string())?; @@ -588,7 +588,10 @@ fn parse_externs(matches: &getopts::Matches) -> Result { enable `--extern crate_name` without `=path`".to_string()); } let name = name.to_string(); - externs.entry(name).or_default().insert(location); + // For Rustdoc purposes, we can treat all externs as public + externs.entry(name) + .or_default() + .locations.insert(location.clone()); } Ok(Externs::new(externs)) } diff --git a/src/librustdoc/html/static/main.js b/src/librustdoc/html/static/main.js index 412029cf3765f..00ca78b749e9d 100644 --- a/src/librustdoc/html/static/main.js +++ b/src/librustdoc/html/static/main.js @@ -930,10 +930,10 @@ if (!DOMTokenList.prototype.remove) { returned = checkReturned(ty, output, true); if (output.name === "*" || returned === true) { in_args = false; - var module = false; + var is_module = false; if (input === "*") { - module = true; + is_module = true; } else { var allFound = true; for (var it = 0; allFound === true && it < inputs.length; it++) { @@ -955,7 +955,7 @@ if (!DOMTokenList.prototype.remove) { dontValidate: true, }; } - if (module === true) { + if (is_module === true) { results[fullId] = { id: i, index: -1, @@ -1073,6 +1073,10 @@ if (!DOMTokenList.prototype.remove) { if (index !== -1 || lev <= MAX_LEV_DISTANCE) { if (index !== -1 && paths.length < 2) { lev = 0; + } else if (searchWords[j] === val) { + // Small trick to fix when you're looking for a one letter type + // and there are other short named types. 
+ lev = -1; } if (results[fullId] === undefined) { results[fullId] = { diff --git a/src/libstd/sys/sgx/alloc.rs b/src/libstd/sys/sgx/alloc.rs index b385d567dd8c4..40daec758a9fc 100644 --- a/src/libstd/sys/sgx/alloc.rs +++ b/src/libstd/sys/sgx/alloc.rs @@ -1,4 +1,4 @@ -use crate::alloc::{self, GlobalAlloc, Layout, System}; +use crate::alloc::{GlobalAlloc, Layout, System}; use super::waitqueue::SpinMutex; @@ -36,11 +36,11 @@ unsafe impl GlobalAlloc for System { #[cfg(not(test))] #[no_mangle] pub unsafe extern "C" fn __rust_c_alloc(size: usize, align: usize) -> *mut u8 { - alloc::alloc(Layout::from_size_align_unchecked(size, align)) + crate::alloc::alloc(Layout::from_size_align_unchecked(size, align)) } #[cfg(not(test))] #[no_mangle] pub unsafe extern "C" fn __rust_c_dealloc(ptr: *mut u8, size: usize, align: usize) { - alloc::dealloc(ptr, Layout::from_size_align_unchecked(size, align)) + crate::alloc::dealloc(ptr, Layout::from_size_align_unchecked(size, align)) } diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index bcc8fdf8cd4e7..ed207a1692a43 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -68,6 +68,18 @@ pub struct Path { pub segments: Vec<PathSegment>, } +impl PartialEq<Symbol> for Path { + fn eq(&self, symbol: &Symbol) -> bool { + self.segments.len() == 1 && { + let name = self.segments[0].ident.name; + // Make sure these symbols are pure strings + debug_assert!(!symbol.is_gensymed()); + debug_assert!(!name.is_gensymed()); + name == *symbol + } + } +} + impl<'a> PartialEq<&'a str> for Path { fn eq(&self, string: &&'a str) -> bool { self.segments.len() == 1 && self.segments[0].ident.name == *string diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs index c0bd5c79b1dd1..e00f91e395280 100644 --- a/src/libsyntax/attr/mod.rs +++ b/src/libsyntax/attr/mod.rs @@ -81,7 +81,10 @@ impl NestedMetaItem { } /// Returns `true` if this list item is a MetaItem with a name of `name`. - pub fn check_name(&self, name: &str) -> bool { + pub fn check_name<T>(&self, name: T) -> bool + where + Path: PartialEq<T>, + { self.meta_item().map_or(false, |meta_item| meta_item.check_name(name)) } @@ -151,7 +154,10 @@ impl Attribute { /// attribute is marked as used. /// /// To check the attribute name without marking it used, use the `path` field directly.
- pub fn check_name(&self, name: &str) -> bool { + pub fn check_name(&self, name: T) -> bool + where + Path: PartialEq, + { let matches = self.path == name; if matches { mark_used(self); @@ -244,7 +250,10 @@ impl MetaItem { } } - pub fn check_name(&self, name: &str) -> bool { + pub fn check_name(&self, name: T) -> bool + where + Path: PartialEq, + { self.path == name } diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index dcb55fb572f33..80aeb31337d50 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -28,7 +28,7 @@ use crate::tokenstream::TokenTree; use errors::{DiagnosticBuilder, Handler}; use rustc_data_structures::fx::FxHashMap; use rustc_target::spec::abi::Abi; -use syntax_pos::{Span, DUMMY_SP}; +use syntax_pos::{Span, DUMMY_SP, symbols}; use log::debug; use std::env; @@ -1366,7 +1366,7 @@ impl<'a> Context<'a> { } } else if n == "doc" { if let Some(content) = attr.meta_item_list() { - if content.iter().any(|c| c.check_name("include")) { + if content.iter().any(|c| c.check_name(symbols::include)) { gate_feature!(self, external_doc, attr.span, "#[doc(include = \"...\")] is experimental" ); @@ -1648,25 +1648,25 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { // check for gated attributes self.context.check_attribute(attr, false); - if attr.check_name("doc") { + if attr.check_name(symbols::doc) { if let Some(content) = attr.meta_item_list() { - if content.len() == 1 && content[0].check_name("cfg") { + if content.len() == 1 && content[0].check_name(symbols::cfg) { gate_feature_post!(&self, doc_cfg, attr.span, "#[doc(cfg(...))] is experimental" ); - } else if content.iter().any(|c| c.check_name("masked")) { + } else if content.iter().any(|c| c.check_name(symbols::masked)) { gate_feature_post!(&self, doc_masked, attr.span, "#[doc(masked)] is experimental" ); - } else if content.iter().any(|c| c.check_name("spotlight")) { + } else if content.iter().any(|c| c.check_name(symbols::spotlight)) { gate_feature_post!(&self, doc_spotlight, attr.span, "#[doc(spotlight)] is experimental" ); - } else if content.iter().any(|c| c.check_name("alias")) { + } else if content.iter().any(|c| c.check_name(symbols::alias)) { gate_feature_post!(&self, doc_alias, attr.span, "#[doc(alias = \"...\")] is experimental" ); - } else if content.iter().any(|c| c.check_name("keyword")) { + } else if content.iter().any(|c| c.check_name(symbols::keyword)) { gate_feature_post!(&self, doc_keyword, attr.span, "#[doc(keyword = \"...\")] is experimental" ); @@ -1674,7 +1674,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { } } - match BUILTIN_ATTRIBUTES.iter().find(|(name, ..)| attr.path == name) { + match BUILTIN_ATTRIBUTES.iter().find(|(name, ..)| attr.path == *name) { Some(&(name, _, template, _)) => self.check_builtin_attribute(attr, name, template), None => if let Some(TokenTree::Token(_, token::Eq)) = attr.tokens.trees().next() { // All key-value attributes are restricted to meta-item syntax. @@ -1727,7 +1727,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { ast::ItemKind::Struct(..) => { for attr in attr::filter_by_name(&i.attrs[..], "repr") { for item in attr.meta_item_list().unwrap_or_else(Vec::new) { - if item.check_name("simd") { + if item.check_name(symbols::simd) { gate_feature_post!(&self, repr_simd, attr.span, "SIMD types are experimental and possibly buggy"); } @@ -1738,7 +1738,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { ast::ItemKind::Enum(..) 
=> { for attr in attr::filter_by_name(&i.attrs[..], "repr") { for item in attr.meta_item_list().unwrap_or_else(Vec::new) { - if item.check_name("align") { + if item.check_name(symbols::align) { gate_feature_post!(&self, repr_align_enum, attr.span, "`#[repr(align(x))]` on enums is experimental"); } @@ -2062,7 +2062,7 @@ pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute], // Process the edition umbrella feature-gates first, to ensure // `edition_enabled_features` is completed before it's queried. for attr in krate_attrs { - if !attr.check_name("feature") { + if !attr.check_name(symbols::feature) { continue } @@ -2107,7 +2107,7 @@ pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute], } for attr in krate_attrs { - if !attr.check_name("feature") { + if !attr.check_name(symbols::feature) { continue } @@ -2237,7 +2237,7 @@ fn maybe_stage_features(span_handler: &Handler, krate: &ast::Crate, }; if !allow_features { for attr in &krate.attrs { - if attr.check_name("feature") { + if attr.check_name(symbols::feature) { let release_channel = option_env!("CFG_RELEASE_CHANNEL").unwrap_or("(unknown)"); span_err!(span_handler, attr.span, E0554, "#![feature] may not be used on the {} release channel", diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 37360a563950b..b51b7fd1ef589 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -2165,9 +2165,11 @@ impl<'a> Parser<'a> { suffix, ) = self.token { let suffix = suffix.and_then(|s| { - let s = s.as_str().get(); - if ["f32", "f64"].contains(&s) { - Some(s) + let s = s.as_str(); + if s == "f32" { + Some("f32") + } else if s == "f64" { + Some("f64") } else { None } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index f5a9aded8455e..ca05ff71c9433 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -645,7 +645,7 @@ pub trait PrintState<'a> { ast::LitKind::Float(ref f, t) => { self.writer().word(format!("{}{}", &f, t.ty_to_string())) } - ast::LitKind::FloatUnsuffixed(ref f) => self.writer().word(f.as_str().get()), + ast::LitKind::FloatUnsuffixed(ref f) => self.writer().word(f.as_str().to_string()), ast::LitKind::Bool(val) => { if val { self.writer().word("true") } else { self.writer().word("false") } } @@ -731,7 +731,7 @@ pub trait PrintState<'a> { if segment.ident.name == keywords::DollarCrate.name() { self.print_dollar_crate(segment.ident)?; } else { - self.writer().word(segment.ident.as_str().get())?; + self.writer().word(segment.ident.as_str().to_string())?; } } } @@ -749,7 +749,7 @@ pub trait PrintState<'a> { } self.maybe_print_comment(attr.span.lo())?; if attr.is_sugared_doc { - self.writer().word(attr.value_str().unwrap().as_str().get())?; + self.writer().word(attr.value_str().unwrap().as_str().to_string())?; self.writer().hardbreak() } else { match attr.style { @@ -858,7 +858,7 @@ pub trait PrintState<'a> { if !ast::Ident::with_empty_ctxt(name).is_path_segment_keyword() { self.writer().word("::")?; } - self.writer().word(name.as_str().get()) + self.writer().word(name.as_str().to_string()) } } @@ -1300,7 +1300,7 @@ impl<'a> State<'a> { } ast::ItemKind::GlobalAsm(ref ga) => { self.head(visibility_qualified(&item.vis, "global_asm!"))?; - self.s.word(ga.asm.as_str().get())?; + self.s.word(ga.asm.as_str().to_string())?; self.end()?; } ast::ItemKind::Ty(ref ty, ref generics) => { @@ -2437,7 +2437,7 @@ impl<'a> State<'a> { if ident.is_raw_guess() { self.s.word(format!("r#{}", ident))?; } else { - 
self.s.word(ident.as_str().get())?; + self.s.word(ident.as_str().to_string())?; } self.ann.post(self, AnnNode::Ident(&ident)) } @@ -2447,7 +2447,7 @@ impl<'a> State<'a> { } pub fn print_name(&mut self, name: ast::Name) -> io::Result<()> { - self.s.word(name.as_str().get())?; + self.s.word(name.as_str().to_string())?; self.ann.post(self, AnnNode::Name(&name)) } diff --git a/src/libsyntax_ext/proc_macro_decls.rs b/src/libsyntax_ext/proc_macro_decls.rs index 22fee902aea26..f0390ba3d40cb 100644 --- a/src/libsyntax_ext/proc_macro_decls.rs +++ b/src/libsyntax_ext/proc_macro_decls.rs @@ -87,7 +87,7 @@ pub fn modify(sess: &ParseSess, } pub fn is_proc_macro_attr(attr: &ast::Attribute) -> bool { - PROC_MACRO_KINDS.iter().any(|kind| attr.check_name(kind)) + PROC_MACRO_KINDS.iter().any(|kind| attr.check_name(*kind)) } impl<'a> CollectProcMacros<'a> { diff --git a/src/libsyntax_ext/proc_macro_server.rs b/src/libsyntax_ext/proc_macro_server.rs index f902e8169b6ba..09dce77579001 100644 --- a/src/libsyntax_ext/proc_macro_server.rs +++ b/src/libsyntax_ext/proc_macro_server.rs @@ -336,11 +336,11 @@ impl Ident { } } fn new(sym: Symbol, is_raw: bool, span: Span) -> Ident { - let string = sym.as_str().get(); - if !Self::is_valid(string) { + let string = sym.as_str(); + if !Self::is_valid(&string) { panic!("`{:?}` is not a valid identifier", string) } - if is_raw && !ast::Ident::from_str(string).can_be_raw() { + if is_raw && !ast::Ident::from_interned_str(sym.as_interned_str()).can_be_raw() { panic!("`{}` cannot be a raw identifier", string); } Ident { sym, is_raw, span } diff --git a/src/libsyntax_pos/Cargo.toml b/src/libsyntax_pos/Cargo.toml index 691abffbbc1af..af7edc0a6bd3e 100644 --- a/src/libsyntax_pos/Cargo.toml +++ b/src/libsyntax_pos/Cargo.toml @@ -11,6 +11,7 @@ crate-type = ["dylib"] [dependencies] serialize = { path = "../libserialize" } +rustc_macros = { path = "../librustc_macros" } rustc_data_structures = { path = "../librustc_data_structures" } arena = { path = "../libarena" } scoped-tls = "1.0" diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index db1543ff13f7e..da857abe66e23 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -16,6 +16,7 @@ #![feature(non_exhaustive)] #![feature(optin_builtin_traits)] #![feature(rustc_attrs)] +#![feature(proc_macro_hygiene)] #![feature(specialization)] #![feature(step_trait)] @@ -32,6 +33,7 @@ mod span_encoding; pub use span_encoding::{Span, DUMMY_SP}; pub mod symbol; +pub use symbol::symbols; mod analyze_source_file; diff --git a/src/libsyntax_pos/symbol.rs b/src/libsyntax_pos/symbol.rs index f61aa4284d29b..64dbb500d2e32 100644 --- a/src/libsyntax_pos/symbol.rs +++ b/src/libsyntax_pos/symbol.rs @@ -6,6 +6,7 @@ use arena::DroplessArena; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::indexed_vec::Idx; use rustc_data_structures::newtype_index; +use rustc_macros::symbols; use serialize::{Decodable, Decoder, Encodable, Encoder}; use std::fmt; @@ -16,6 +17,94 @@ use std::hash::{Hash, Hasher}; use crate::hygiene::SyntaxContext; use crate::{Span, DUMMY_SP, GLOBALS}; +symbols! { + // After modifying this list adjust `is_special`, `is_used_keyword`/`is_unused_keyword`, + // this should be rarely necessary though if the keywords are kept in alphabetic order. + Keywords { + // Special reserved identifiers used internally for elided lifetimes, + // unnamed method parameters, crate root module, error recovery etc. 
+ Invalid: "", + PathRoot: "{{root}}", + DollarCrate: "$crate", + Underscore: "_", + + // Keywords that are used in stable Rust. + As: "as", + Box: "box", + Break: "break", + Const: "const", + Continue: "continue", + Crate: "crate", + Else: "else", + Enum: "enum", + Extern: "extern", + False: "false", + Fn: "fn", + For: "for", + If: "if", + Impl: "impl", + In: "in", + Let: "let", + Loop: "loop", + Match: "match", + Mod: "mod", + Move: "move", + Mut: "mut", + Pub: "pub", + Ref: "ref", + Return: "return", + SelfLower: "self", + SelfUpper: "Self", + Static: "static", + Struct: "struct", + Super: "super", + Trait: "trait", + True: "true", + Type: "type", + Unsafe: "unsafe", + Use: "use", + Where: "where", + While: "while", + + // Keywords that are used in unstable Rust or reserved for future use. + Abstract: "abstract", + Become: "become", + Do: "do", + Final: "final", + Macro: "macro", + Override: "override", + Priv: "priv", + Typeof: "typeof", + Unsized: "unsized", + Virtual: "virtual", + Yield: "yield", + + // Edition-specific keywords that are used in stable Rust. + Dyn: "dyn", // >= 2018 Edition only + + // Edition-specific keywords that are used in unstable Rust or reserved for future use. + Async: "async", // >= 2018 Edition only + Try: "try", // >= 2018 Edition only + + // Special lifetime names + UnderscoreLifetime: "'_", + StaticLifetime: "'static", + + // Weak keywords, have special meaning only in specific contexts. + Auto: "auto", + Catch: "catch", + Default: "default", + Existential: "existential", + Union: "union", + } + + // Other symbols that can be referred to with syntax_pos::symbols::* + Other { + doc, cfg, masked, spotlight, alias, keyword, feature, include, simd, align, stable, + unstable, rustc_const_unstable, + } +} + #[derive(Copy, Clone, Eq)] pub struct Ident { pub name: Symbol, @@ -317,129 +406,32 @@ impl Interner { } } -// In this macro, there is the requirement that the name (the number) must be monotonically -// increasing by one in the special identifiers, starting at 0; the same holds for the keywords, -// except starting from the next number instead of zero. -macro_rules! declare_keywords {( - $( ($index: expr, $konst: ident, $string: expr) )* -) => { - pub mod keywords { - use super::{Symbol, Ident}; - #[derive(Clone, Copy, PartialEq, Eq)] - pub struct Keyword { - ident: Ident, - } - impl Keyword { - #[inline] pub fn ident(self) -> Ident { self.ident } - #[inline] pub fn name(self) -> Symbol { self.ident.name } - } - $( - #[allow(non_upper_case_globals)] - pub const $konst: Keyword = Keyword { - ident: Ident::with_empty_ctxt(super::Symbol::new($index)) - }; - )* - - impl std::str::FromStr for Keyword { - type Err = (); - - fn from_str(s: &str) -> Result { - match s { - $($string => Ok($konst),)* - _ => Err(()), - } - } - } +pub mod keywords { + use super::{Symbol, Ident}; + + #[derive(Clone, Copy, PartialEq, Eq)] + pub struct Keyword { + ident: Ident, } - impl Interner { - pub fn fresh() -> Self { - Interner::prefill(&[$($string,)*]) + impl Keyword { + #[inline] + pub fn ident(self) -> Ident { + self.ident + } + + #[inline] + pub fn name(self) -> Symbol { + self.ident.name } } -}} -// N.B., leaving holes in the ident table is bad! a different ident will get -// interned with the id from the hole, but it will be between the min and max -// of the reserved words, and thus tagged as "reserved". 
-// After modifying this list adjust `is_special`, `is_used_keyword`/`is_unused_keyword`, -// this should be rarely necessary though if the keywords are kept in alphabetic order. -declare_keywords! { - // Special reserved identifiers used internally for elided lifetimes, - // unnamed method parameters, crate root module, error recovery etc. - (0, Invalid, "") - (1, PathRoot, "{{root}}") - (2, DollarCrate, "$crate") - (3, Underscore, "_") - - // Keywords that are used in stable Rust. - (4, As, "as") - (5, Box, "box") - (6, Break, "break") - (7, Const, "const") - (8, Continue, "continue") - (9, Crate, "crate") - (10, Else, "else") - (11, Enum, "enum") - (12, Extern, "extern") - (13, False, "false") - (14, Fn, "fn") - (15, For, "for") - (16, If, "if") - (17, Impl, "impl") - (18, In, "in") - (19, Let, "let") - (20, Loop, "loop") - (21, Match, "match") - (22, Mod, "mod") - (23, Move, "move") - (24, Mut, "mut") - (25, Pub, "pub") - (26, Ref, "ref") - (27, Return, "return") - (28, SelfLower, "self") - (29, SelfUpper, "Self") - (30, Static, "static") - (31, Struct, "struct") - (32, Super, "super") - (33, Trait, "trait") - (34, True, "true") - (35, Type, "type") - (36, Unsafe, "unsafe") - (37, Use, "use") - (38, Where, "where") - (39, While, "while") - - // Keywords that are used in unstable Rust or reserved for future use. - (40, Abstract, "abstract") - (41, Become, "become") - (42, Do, "do") - (43, Final, "final") - (44, Macro, "macro") - (45, Override, "override") - (46, Priv, "priv") - (47, Typeof, "typeof") - (48, Unsized, "unsized") - (49, Virtual, "virtual") - (50, Yield, "yield") - - // Edition-specific keywords that are used in stable Rust. - (51, Dyn, "dyn") // >= 2018 Edition only - - // Edition-specific keywords that are used in unstable Rust or reserved for future use. - (52, Async, "async") // >= 2018 Edition only - (53, Try, "try") // >= 2018 Edition only - - // Special lifetime names - (54, UnderscoreLifetime, "'_") - (55, StaticLifetime, "'static") - - // Weak keywords, have special meaning only in specific contexts. - (56, Auto, "auto") - (57, Catch, "catch") - (58, Default, "default") - (59, Existential, "existential") - (60, Union, "union") + keywords!(); +} + +pub mod symbols { + use super::Symbol; + symbols!(); } impl Symbol { @@ -524,7 +516,11 @@ impl LocalInternedString { } } - pub fn get(&self) -> &'static str { + pub fn get(&self) -> &str { + // This returns a valid string since we ensure that `self` outlives the interner + // by creating the interner on a thread which outlives threads which can access it. + // This type cannot move to a thread which outlives the interner since it does + // not implement Send. 
self.string } } diff --git a/src/test/run-pass/async-await.rs b/src/test/run-pass/async-await.rs index 4f5f7724ad076..518452aefc152 100644 --- a/src/test/run-pass/async-await.rs +++ b/src/test/run-pass/async-await.rs @@ -19,7 +19,10 @@ struct Counter { } impl ArcWake for Counter { - fn wake(arc_self: &Arc) { + fn wake(self: Arc) { + Self::wake_by_ref(&self) + } + fn wake_by_ref(arc_self: &Arc) { arc_self.wakes.fetch_add(1, atomic::Ordering::SeqCst); } } @@ -34,7 +37,7 @@ impl Future for WakeOnceThenComplete { if self.0 { Poll::Ready(()) } else { - cx.waker().wake(); + cx.waker().wake_by_ref(); self.0 = true; Poll::Pending } diff --git a/src/test/run-pass/auxiliary/arc_wake.rs b/src/test/run-pass/auxiliary/arc_wake.rs index 74ec56f55171a..93e074e7ee55c 100644 --- a/src/test/run-pass/auxiliary/arc_wake.rs +++ b/src/test/run-pass/auxiliary/arc_wake.rs @@ -12,25 +12,30 @@ macro_rules! waker_vtable { &RawWakerVTable::new( clone_arc_raw::<$ty>, wake_arc_raw::<$ty>, + wake_by_ref_arc_raw::<$ty>, drop_arc_raw::<$ty>, ) }; } pub trait ArcWake { - fn wake(arc_self: &Arc); + fn wake(self: Arc); + + fn wake_by_ref(arc_self: &Arc) { + arc_self.clone().wake() + } fn into_waker(wake: Arc) -> Waker where Self: Sized { - let ptr = Arc::into_raw(wake) as *const(); + let ptr = Arc::into_raw(wake) as *const (); unsafe { - Waker::new_unchecked(RawWaker::new(ptr, waker_vtable!(Self))) + Waker::from_raw(RawWaker::new(ptr, waker_vtable!(Self))) } } } -unsafe fn increase_refcount(data: *const()) { +unsafe fn increase_refcount(data: *const ()) { // Retain Arc by creating a copy let arc: Arc = Arc::from_raw(data as *const T); let arc_clone = arc.clone(); @@ -39,18 +44,23 @@ unsafe fn increase_refcount(data: *const()) { let _ = Arc::into_raw(arc_clone); } -unsafe fn clone_arc_raw(data: *const()) -> RawWaker { +unsafe fn clone_arc_raw(data: *const ()) -> RawWaker { increase_refcount::(data); RawWaker::new(data, waker_vtable!(T)) } -unsafe fn drop_arc_raw(data: *const()) { +unsafe fn drop_arc_raw(data: *const ()) { // Drop Arc let _: Arc = Arc::from_raw(data as *const T); } -unsafe fn wake_arc_raw(data: *const()) { +unsafe fn wake_arc_raw(data: *const ()) { + let arc: Arc = Arc::from_raw(data as *const T); + ArcWake::wake(arc); +} + +unsafe fn wake_by_ref_arc_raw(data: *const ()) { let arc: Arc = Arc::from_raw(data as *const T); - ArcWake::wake(&arc); + ArcWake::wake_by_ref(&arc); let _ = Arc::into_raw(arc); } diff --git a/src/test/run-pass/futures-api.rs b/src/test/run-pass/futures-api.rs index 5d0b0db510f41..6094f15569bb6 100644 --- a/src/test/run-pass/futures-api.rs +++ b/src/test/run-pass/futures-api.rs @@ -20,7 +20,10 @@ struct Counter { } impl ArcWake for Counter { - fn wake(arc_self: &Arc) { + fn wake(self: Arc) { + Self::wake_by_ref(&self) + } + fn wake_by_ref(arc_self: &Arc) { arc_self.wakes.fetch_add(1, atomic::Ordering::SeqCst); } } @@ -32,8 +35,8 @@ impl Future for MyFuture { fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { // Wake twice let waker = cx.waker(); - waker.wake(); - waker.wake(); + waker.wake_by_ref(); + waker.wake_by_ref(); Poll::Ready(()) } } diff --git a/src/test/rustdoc-js/search-short-types.js b/src/test/rustdoc-js/search-short-types.js new file mode 100644 index 0000000000000..0ebf4860cfa58 --- /dev/null +++ b/src/test/rustdoc-js/search-short-types.js @@ -0,0 +1,8 @@ +const QUERY = 'P'; + +const EXPECTED = { + 'others': [ + { 'path': 'search_short_types', 'name': 'P' }, + { 'path': 'search_short_types', 'name': 'Ap' }, + ], +}; diff --git 
a/src/test/rustdoc-js/search-short-types.rs b/src/test/rustdoc-js/search-short-types.rs new file mode 100644 index 0000000000000..2eacc0a358284 --- /dev/null +++ b/src/test/rustdoc-js/search-short-types.rs @@ -0,0 +1,68 @@ +macro_rules! imp { + ($name:ident) => { + pub struct $name { + pub op: usize, + } + impl $name { + pub fn op() {} + pub fn cmp() {} + pub fn map() {} + pub fn pop() {} + pub fn ptr() {} + pub fn rpo() {} + pub fn drop() {} + pub fn copy() {} + pub fn zip() {} + pub fn sup() {} + pub fn pa() {} + pub fn pb() {} + pub fn pc() {} + pub fn pd() {} + pub fn pe() {} + pub fn pf() {} + pub fn pg() {} + pub fn ph() {} + pub fn pi() {} + pub fn pj() {} + pub fn pk() {} + pub fn pl() {} + pub fn pm() {} + pub fn pn() {} + pub fn po() {} + } + }; + ($name:ident, $($names:ident),*) => { + imp!($name); + imp!($($names),*); + }; +} +macro_rules! en { + ($name:ident) => { + pub enum $name { + Ptr, + Rp, + Rpo, + Pt, + Drop, + Dr, + Dro, + Sup, + Op, + Cmp, + Map, + Mp, + } + }; + ($name:ident, $($names:ident),*) => { + en!($name); + en!($($names),*); + }; +} + +imp!(Ot, Foo, Cmp, Map, Loc, Lac, Toc, Si, Sig, Sip, Psy, Psi, Py, Pi, Pa, Pb, Pc, Pd); +imp!(Pe, Pf, Pg, Ph, Pj, Pk, Pl, Pm, Pn, Po, Pq, Pr, Ps, Pt, Pu, Pv, Pw, Px, Pz, Ap, Bp, Cp); +imp!(Dp, Ep, Fp, Gp, Hp, Ip, Jp, Kp, Lp, Mp, Np, Op, Pp, Qp, Rp, Sp, Tp, Up, Vp, Wp, Xp, Yp, Zp); + +en!(Place, Plac, Plae, Plce, Pace, Scalar, Scalr, Scaar, Sclar, Salar); + +pub struct P; diff --git a/src/test/rustdoc-js/substring.js b/src/test/rustdoc-js/substring.js new file mode 100644 index 0000000000000..af05cd1ad34d1 --- /dev/null +++ b/src/test/rustdoc-js/substring.js @@ -0,0 +1,8 @@ +const QUERY = 'waker_from'; + +const EXPECTED = { + 'others': [ + { 'path': 'substring::SuperWaker', 'name': 'local_waker_from_nonlocal' }, + { 'path': 'substring::SuperWakerTask', 'name': 'local_waker_from_nonlocal' }, + ], +}; diff --git a/src/test/rustdoc-js/substring.rs b/src/test/rustdoc-js/substring.rs new file mode 100644 index 0000000000000..e729c722c7999 --- /dev/null +++ b/src/test/rustdoc-js/substring.rs @@ -0,0 +1,21 @@ +pub struct SuperWaker; + +impl SuperWaker { + pub fn local_waker_from_nonlocal() {} + pub fn local_waker_frm_nonlocal() {} + pub fn some_method() {} + pub fn some_other_method() {} + pub fn waker_non_local() {} + pub fn from_non_local() {} +} + +pub struct SuperWakerTask; + +impl SuperWakerTask { + pub fn local_waker_from_nonlocal() {} + pub fn local_waker_frm_nonlocal() {} + pub fn some_method() {} + pub fn some_other_method() {} + pub fn waker_non_local() {} + pub fn from_non_local() {} +} diff --git a/src/test/ui/associated-item/associated-item-enum.rs b/src/test/ui/associated-item/associated-item-enum.rs new file mode 100644 index 0000000000000..30ba258155bb9 --- /dev/null +++ b/src/test/ui/associated-item/associated-item-enum.rs @@ -0,0 +1,20 @@ +enum Enum { Variant } + +impl Enum { + const MISSPELLABLE: i32 = 0; + fn misspellable() {} +} + +trait Trait { + fn misspellable_trait() {} +} + +impl Trait for Enum { + fn misspellable_trait() {} +} + +fn main() { + Enum::mispellable(); //~ ERROR no variant or associated item + Enum::mispellable_trait(); //~ ERROR no variant or associated item + Enum::MISPELLABLE; //~ ERROR no variant or associated item +} diff --git a/src/test/ui/associated-item/associated-item-enum.stderr b/src/test/ui/associated-item/associated-item-enum.stderr new file mode 100644 index 0000000000000..5a62b9736dedd --- /dev/null +++ b/src/test/ui/associated-item/associated-item-enum.stderr @@ -0,0 +1,36 @@ 
+error[E0599]: no variant or associated item named `mispellable` found for type `Enum` in the current scope + --> $DIR/associated-item-enum.rs:17:11 + | +LL | enum Enum { Variant } + | --------- variant or associated item `mispellable` not found here +... +LL | Enum::mispellable(); + | ^^^^^^^^^^^ + | | + | variant or associated item not found in `Enum` + | help: there is a method with a similar name: `misspellable` + +error[E0599]: no variant or associated item named `mispellable_trait` found for type `Enum` in the current scope + --> $DIR/associated-item-enum.rs:18:11 + | +LL | enum Enum { Variant } + | --------- variant or associated item `mispellable_trait` not found here +... +LL | Enum::mispellable_trait(); + | ^^^^^^^^^^^^^^^^^ variant or associated item not found in `Enum` + +error[E0599]: no variant or associated item named `MISPELLABLE` found for type `Enum` in the current scope + --> $DIR/associated-item-enum.rs:19:11 + | +LL | enum Enum { Variant } + | --------- variant or associated item `MISPELLABLE` not found here +... +LL | Enum::MISPELLABLE; + | ^^^^^^^^^^^ + | | + | variant or associated item not found in `Enum` + | help: there is an associated constant with a similar name: `MISSPELLABLE` + +error: aborting due to 3 previous errors + +For more information about this error, try `rustc --explain E0599`. diff --git a/src/test/ui/auto-ref-slice-plus-ref.stderr b/src/test/ui/auto-ref-slice-plus-ref.stderr index 97b9cd961a026..f2e0d379d1b30 100644 --- a/src/test/ui/auto-ref-slice-plus-ref.stderr +++ b/src/test/ui/auto-ref-slice-plus-ref.stderr @@ -2,7 +2,7 @@ error[E0599]: no method named `test_mut` found for type `std::vec::Vec<{integer} --> $DIR/auto-ref-slice-plus-ref.rs:7:7 | LL | a.test_mut(); - | ^^^^^^^^ help: did you mean: `get_mut` + | ^^^^^^^^ help: there is a method with a similar name: `get_mut` | = help: items from traits can only be used if the trait is implemented and in scope = note: the following trait defines an item `test_mut`, perhaps you need to implement it: diff --git a/src/test/ui/block-result/issue-3563.stderr b/src/test/ui/block-result/issue-3563.stderr index a6346a5233f4c..237b8c54ce301 100644 --- a/src/test/ui/block-result/issue-3563.stderr +++ b/src/test/ui/block-result/issue-3563.stderr @@ -2,7 +2,7 @@ error[E0599]: no method named `b` found for type `&Self` in the current scope --> $DIR/issue-3563.rs:3:17 | LL | || self.b() - | ^ help: did you mean: `a` + | ^ help: there is a method with a similar name: `a` error: aborting due to previous error diff --git a/src/test/ui/bogus-tag.stderr b/src/test/ui/bogus-tag.stderr index 0bf0d4b14ee91..890f6800c22af 100644 --- a/src/test/ui/bogus-tag.stderr +++ b/src/test/ui/bogus-tag.stderr @@ -1,11 +1,11 @@ -error[E0599]: no variant named `Hsl` found for type `Color` in the current scope +error[E0599]: no variant or associated item named `Hsl` found for type `Color` in the current scope --> $DIR/bogus-tag.rs:7:16 | LL | enum Color { Rgb(isize, isize, isize), Rgba(isize, isize, isize, isize), } - | ---------- variant `Hsl` not found here + | ---------- variant or associated item `Hsl` not found here ... 
LL | Color::Hsl(h, s, l) => { println!("hsl"); } - | ^^^ variant not found in `Color` + | ^^^ variant or associated item not found in `Color` error: aborting due to previous error diff --git a/src/test/ui/empty/empty-struct-braces-expr.rs b/src/test/ui/empty/empty-struct-braces-expr.rs index 2f2f41ae8c1f0..e33fcb70db775 100644 --- a/src/test/ui/empty/empty-struct-braces-expr.rs +++ b/src/test/ui/empty/empty-struct-braces-expr.rs @@ -19,6 +19,8 @@ fn main() { let xe1 = XEmpty1; //~ ERROR expected value, found struct `XEmpty1` let xe1 = XEmpty1(); //~ ERROR expected function, found struct `XEmpty1` - let xe3 = XE::Empty3; //~ ERROR no variant named `Empty3` found for type - let xe3 = XE::Empty3(); //~ ERROR no variant named `Empty3` found for type + let xe3 = XE::Empty3; //~ ERROR no variant or associated item named `Empty3` found for type + let xe3 = XE::Empty3(); //~ ERROR no variant or associated item named `Empty3` found for type + + XE::Empty1 {}; //~ ERROR no variant `Empty1` in enum `empty_struct::XE` } diff --git a/src/test/ui/empty/empty-struct-braces-expr.stderr b/src/test/ui/empty/empty-struct-braces-expr.stderr index 57c8c1c85dd41..f5609c8e1bf7b 100644 --- a/src/test/ui/empty/empty-struct-braces-expr.stderr +++ b/src/test/ui/empty/empty-struct-braces-expr.stderr @@ -46,25 +46,31 @@ LL | let xe1 = XEmpty1(); | did you mean `XEmpty1 { /* fields */ }`? | help: a unit struct with a similar name exists: `XEmpty2` -error[E0599]: no variant named `Empty3` found for type `empty_struct::XE` in the current scope +error[E0599]: no variant or associated item named `Empty3` found for type `empty_struct::XE` in the current scope --> $DIR/empty-struct-braces-expr.rs:22:19 | LL | let xe3 = XE::Empty3; | ^^^^^^ | | - | variant not found in `empty_struct::XE` - | help: did you mean: `XEmpty3` + | variant or associated item not found in `empty_struct::XE` + | help: there is a variant with a similar name: `XEmpty3` -error[E0599]: no variant named `Empty3` found for type `empty_struct::XE` in the current scope +error[E0599]: no variant or associated item named `Empty3` found for type `empty_struct::XE` in the current scope --> $DIR/empty-struct-braces-expr.rs:23:19 | LL | let xe3 = XE::Empty3(); | ^^^^^^ | | - | variant not found in `empty_struct::XE` - | help: did you mean: `XEmpty3` + | variant or associated item not found in `empty_struct::XE` + | help: there is a variant with a similar name: `XEmpty3` -error: aborting due to 8 previous errors +error: no variant `Empty1` in enum `empty_struct::XE` + --> $DIR/empty-struct-braces-expr.rs:25:9 + | +LL | XE::Empty1 {}; + | ^^^^^^ help: there is a variant with a similar name: `XEmpty3` + +error: aborting due to 9 previous errors Some errors occurred: E0423, E0599. For more information about an error, try `rustc --explain E0423`. 
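The `suggest.rs` change above also explains the expected-output churn in these ui tests: E0599 on an enum path now says "no variant or associated item", and the help names the kind of the similarly named item it found (variant, method, or associated constant). A small sketch mirroring the `associated-item-enum.rs` test added above (illustrative only, expected to fail to compile):

enum Enum { Variant }

impl Enum {
    const MISSPELLABLE: i32 = 0;
    fn misspellable() {}
}

fn main() {
    // error[E0599]: no variant or associated item named `mispellable` found for type `Enum`
    // help: there is a method with a similar name: `misspellable`
    Enum::mispellable();

    // error[E0599]: no variant or associated item named `MISPELLABLE` found for type `Enum`
    // help: there is an associated constant with a similar name: `MISSPELLABLE`
    Enum::MISPELLABLE;
}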
diff --git a/src/test/ui/issues/issue-22933-2.rs b/src/test/ui/issues/issue-22933-2.rs index 68d9ef2cfa809..98a354b1bd0fc 100644 --- a/src/test/ui/issues/issue-22933-2.rs +++ b/src/test/ui/issues/issue-22933-2.rs @@ -2,7 +2,7 @@ enum Delicious { Pie = 0x1, Apple = 0x2, ApplePie = Delicious::Apple as isize | Delicious::PIE as isize, - //~^ ERROR no variant named `PIE` found for type `Delicious` + //~^ ERROR no variant or associated item named `PIE` found for type `Delicious` } fn main() {} diff --git a/src/test/ui/issues/issue-22933-2.stderr b/src/test/ui/issues/issue-22933-2.stderr index 23b1474bde704..72038ea20a3fa 100644 --- a/src/test/ui/issues/issue-22933-2.stderr +++ b/src/test/ui/issues/issue-22933-2.stderr @@ -1,11 +1,11 @@ -error[E0599]: no variant named `PIE` found for type `Delicious` in the current scope +error[E0599]: no variant or associated item named `PIE` found for type `Delicious` in the current scope --> $DIR/issue-22933-2.rs:4:55 | LL | enum Delicious { - | -------------- variant `PIE` not found here + | -------------- variant or associated item `PIE` not found here ... LL | ApplePie = Delicious::Apple as isize | Delicious::PIE as isize, - | ^^^ variant not found in `Delicious` + | ^^^ variant or associated item not found in `Delicious` error: aborting due to previous error diff --git a/src/test/ui/issues/issue-23173.rs b/src/test/ui/issues/issue-23173.rs index 2922ebddf4c8a..7c15598448d7f 100644 --- a/src/test/ui/issues/issue-23173.rs +++ b/src/test/ui/issues/issue-23173.rs @@ -6,12 +6,8 @@ struct Struct { fn use_token(token: &Token) { unimplemented!() } fn main() { - use_token(&Token::Homura); - //~^ ERROR no variant named `Homura` - Struct::method(); - //~^ ERROR no function or associated item named `method` found for type - Struct::method; - //~^ ERROR no function or associated item named `method` found for type - Struct::Assoc; - //~^ ERROR no associated item named `Assoc` found for type `Struct` in + use_token(&Token::Homura); //~ ERROR no variant or associated item named `Homura` + Struct::method(); //~ ERROR no function or associated item named `method` found for type + Struct::method; //~ ERROR no function or associated item named `method` found for type + Struct::Assoc; //~ ERROR no associated item named `Assoc` found for type `Struct` in } diff --git a/src/test/ui/issues/issue-23173.stderr b/src/test/ui/issues/issue-23173.stderr index 75dba883608e9..699e41156fa80 100644 --- a/src/test/ui/issues/issue-23173.stderr +++ b/src/test/ui/issues/issue-23173.stderr @@ -1,14 +1,14 @@ -error[E0599]: no variant named `Homura` found for type `Token` in the current scope +error[E0599]: no variant or associated item named `Homura` found for type `Token` in the current scope --> $DIR/issue-23173.rs:9:23 | LL | enum Token { LeftParen, RightParen, Plus, Minus, /* etc */ } - | ---------- variant `Homura` not found here + | ---------- variant or associated item `Homura` not found here ... 
LL | use_token(&Token::Homura); - | ^^^^^^ variant not found in `Token` + | ^^^^^^ variant or associated item not found in `Token` error[E0599]: no function or associated item named `method` found for type `Struct` in the current scope - --> $DIR/issue-23173.rs:11:13 + --> $DIR/issue-23173.rs:10:13 | LL | struct Struct { | ------------- function or associated item `method` not found for this @@ -17,7 +17,7 @@ LL | Struct::method(); | ^^^^^^ function or associated item not found in `Struct` error[E0599]: no function or associated item named `method` found for type `Struct` in the current scope - --> $DIR/issue-23173.rs:13:13 + --> $DIR/issue-23173.rs:11:13 | LL | struct Struct { | ------------- function or associated item `method` not found for this @@ -26,7 +26,7 @@ LL | Struct::method; | ^^^^^^ function or associated item not found in `Struct` error[E0599]: no associated item named `Assoc` found for type `Struct` in the current scope - --> $DIR/issue-23173.rs:15:13 + --> $DIR/issue-23173.rs:12:13 | LL | struct Struct { | ------------- associated item `Assoc` not found for this diff --git a/src/test/ui/issues/issue-23217.rs b/src/test/ui/issues/issue-23217.rs index 11426df6177ca..157f20d22d8ae 100644 --- a/src/test/ui/issues/issue-23217.rs +++ b/src/test/ui/issues/issue-23217.rs @@ -1,6 +1,5 @@ pub enum SomeEnum { - B = SomeEnum::A, - //~^ ERROR no variant named `A` found for type `SomeEnum` + B = SomeEnum::A, //~ ERROR no variant or associated item named `A` found for type `SomeEnum` } fn main() {} diff --git a/src/test/ui/issues/issue-23217.stderr b/src/test/ui/issues/issue-23217.stderr index 2a982422cab9c..97100ed375374 100644 --- a/src/test/ui/issues/issue-23217.stderr +++ b/src/test/ui/issues/issue-23217.stderr @@ -1,13 +1,13 @@ -error[E0599]: no variant named `A` found for type `SomeEnum` in the current scope +error[E0599]: no variant or associated item named `A` found for type `SomeEnum` in the current scope --> $DIR/issue-23217.rs:2:19 | LL | pub enum SomeEnum { - | ----------------- variant `A` not found here + | ----------------- variant or associated item `A` not found here LL | B = SomeEnum::A, | ^ | | - | variant not found in `SomeEnum` - | help: did you mean: `B` + | variant or associated item not found in `SomeEnum` + | help: there is a variant with a similar name: `B` error: aborting due to previous error diff --git a/src/test/ui/issues/issue-28344.stderr b/src/test/ui/issues/issue-28344.stderr index fcd98b111cfc5..f3a8019e2329c 100644 --- a/src/test/ui/issues/issue-28344.stderr +++ b/src/test/ui/issues/issue-28344.stderr @@ -11,7 +11,7 @@ LL | let x: u8 = BitXor::bitor(0 as u8, 0 as u8); | ^^^^^ | | | function or associated item not found in `dyn std::ops::BitXor<_>` - | help: did you mean: `bitxor` + | help: there is a method with a similar name: `bitxor` error[E0191]: the value of the associated type `Output` (from the trait `std::ops::BitXor`) must be specified --> $DIR/issue-28344.rs:8:13 @@ -26,7 +26,7 @@ LL | let g = BitXor::bitor; | ^^^^^ | | | function or associated item not found in `dyn std::ops::BitXor<_>` - | help: did you mean: `bitxor` + | help: there is a method with a similar name: `bitxor` error: aborting due to 4 previous errors diff --git a/src/test/ui/issues/issue-28971.rs b/src/test/ui/issues/issue-28971.rs index 3f0d2fafb0478..6493565d21647 100644 --- a/src/test/ui/issues/issue-28971.rs +++ b/src/test/ui/issues/issue-28971.rs @@ -1,5 +1,3 @@ -// This should not cause an ICE - enum Foo { Bar(u8) } @@ -7,7 +5,7 @@ fn main(){ foo(|| { match Foo::Bar(1) { 
Foo::Baz(..) => (), - //~^ ERROR no variant named `Baz` found for type `Foo` + //~^ ERROR no variant or associated item named `Baz` found for type `Foo` _ => (), } }); diff --git a/src/test/ui/issues/issue-28971.stderr b/src/test/ui/issues/issue-28971.stderr index 4781f7abe8116..7411896443dfe 100644 --- a/src/test/ui/issues/issue-28971.stderr +++ b/src/test/ui/issues/issue-28971.stderr @@ -1,14 +1,14 @@ -error[E0599]: no variant named `Baz` found for type `Foo` in the current scope - --> $DIR/issue-28971.rs:9:18 +error[E0599]: no variant or associated item named `Baz` found for type `Foo` in the current scope + --> $DIR/issue-28971.rs:7:18 | LL | enum Foo { - | -------- variant `Baz` not found here + | -------- variant or associated item `Baz` not found here ... LL | Foo::Baz(..) => (), | ^^^ | | - | variant not found in `Foo` - | help: did you mean: `Bar` + | variant or associated item not found in `Foo` + | help: there is a variant with a similar name: `Bar` error: aborting due to previous error diff --git a/src/test/ui/issues/issue-34209.rs b/src/test/ui/issues/issue-34209.rs index 50095be7740ef..fc2c3679e13b8 100644 --- a/src/test/ui/issues/issue-34209.rs +++ b/src/test/ui/issues/issue-34209.rs @@ -4,8 +4,7 @@ enum S { fn bug(l: S) { match l { - S::B { } => { }, - //~^ ERROR no variant `B` on enum `S` + S::B {} => {}, //~ ERROR no variant `B` in enum `S` } } diff --git a/src/test/ui/issues/issue-34209.stderr b/src/test/ui/issues/issue-34209.stderr index 79aba89c1484e..194bb2bfab8ae 100644 --- a/src/test/ui/issues/issue-34209.stderr +++ b/src/test/ui/issues/issue-34209.stderr @@ -1,8 +1,11 @@ -error: no variant `B` on enum `S` - --> $DIR/issue-34209.rs:7:9 +error: no variant `B` in enum `S` + --> $DIR/issue-34209.rs:7:12 | -LL | S::B { } => { }, - | ^^^^ help: did you mean: `S::A` +LL | enum S { + | ------ variant `B` not found here +... 
+LL | S::B {} => {}, + | ^ help: there is a variant with a similar name: `A` error: aborting due to previous error diff --git a/src/test/ui/issues/issue-50264-inner-deref-trait/result-deref-err.stderr b/src/test/ui/issues/issue-50264-inner-deref-trait/result-deref-err.stderr index 96d6814b0fe93..333036127eadb 100644 --- a/src/test/ui/issues/issue-50264-inner-deref-trait/result-deref-err.stderr +++ b/src/test/ui/issues/issue-50264-inner-deref-trait/result-deref-err.stderr @@ -2,7 +2,7 @@ error[E0599]: no method named `deref_err` found for type `std::result::Result<_, --> $DIR/result-deref-err.rs:4:28 | LL | let _result = &Err(41).deref_err(); - | ^^^^^^^^^ help: did you mean: `deref_ok` + | ^^^^^^^^^ help: there is a method with a similar name: `deref_ok` | = note: the method `deref_err` exists but the following trait bounds were not satisfied: `{integer} : std::ops::Deref` diff --git a/src/test/ui/nll/extra-unused-mut.rs b/src/test/ui/nll/extra-unused-mut.rs index d5f0b0ddf18bf..6d0d6e16a6775 100644 --- a/src/test/ui/nll/extra-unused-mut.rs +++ b/src/test/ui/nll/extra-unused-mut.rs @@ -1,6 +1,6 @@ // extra unused mut lint tests for #51918 -// run-pass +// compile-pass #![feature(generators, nll)] #![deny(unused_mut)] @@ -53,11 +53,14 @@ fn if_guard(x: Result) { } } -fn main() { - ref_argument(0); - mutable_upvar(); - generator_mutable_upvar(); - ref_closure_argument(); - parse_dot_or_call_expr_with(Vec::new()); - if_guard(Ok(0)); +// #59620 +fn nested_closures() { + let mut i = 0; + [].iter().for_each(|_: &i32| { + [].iter().for_each(move |_: &i32| { + i += 1; + }); + }); } + +fn main() {} diff --git a/src/test/ui/privacy/pub-priv-dep/pub-priv1.rs b/src/test/ui/privacy/pub-priv-dep/pub-priv1.rs index 9ebc96017fe9c..784615354a95c 100644 --- a/src/test/ui/privacy/pub-priv-dep/pub-priv1.rs +++ b/src/test/ui/privacy/pub-priv-dep/pub-priv1.rs @@ -1,6 +1,6 @@ // aux-build:priv_dep.rs // aux-build:pub_dep.rs - // compile-flags: --extern-private priv_dep + // extern-private:priv_dep #![deny(exported_private_dependencies)] // This crate is a private dependency diff --git a/src/test/ui/suggestions/suggest-methods.stderr b/src/test/ui/suggestions/suggest-methods.stderr index 09d58575d97dd..ad4a4deb5a886 100644 --- a/src/test/ui/suggestions/suggest-methods.stderr +++ b/src/test/ui/suggestions/suggest-methods.stderr @@ -5,19 +5,19 @@ LL | struct Foo; | ----------- method `bat` not found for this ... 
LL | f.bat(1.0); - | ^^^ help: did you mean: `bar` + | ^^^ help: there is a method with a similar name: `bar` error[E0599]: no method named `is_emtpy` found for type `std::string::String` in the current scope --> $DIR/suggest-methods.rs:21:15 | LL | let _ = s.is_emtpy(); - | ^^^^^^^^ help: did you mean: `is_empty` + | ^^^^^^^^ help: there is a method with a similar name: `is_empty` error[E0599]: no method named `count_eos` found for type `u32` in the current scope --> $DIR/suggest-methods.rs:25:19 | LL | let _ = 63u32.count_eos(); - | ^^^^^^^^^ help: did you mean: `count_zeros` + | ^^^^^^^^^ help: there is a method with a similar name: `count_zeros` error[E0599]: no method named `count_o` found for type `u32` in the current scope --> $DIR/suggest-methods.rs:28:19 diff --git a/src/test/ui/suggestions/suggest-variants.rs b/src/test/ui/suggestions/suggest-variants.rs index 6d6e280d9652f..d418834432e08 100644 --- a/src/test/ui/suggestions/suggest-variants.rs +++ b/src/test/ui/suggestions/suggest-variants.rs @@ -12,4 +12,7 @@ fn main() { println!("My shape is {:?}", Shape::Squareee { size: 5}); //~ ERROR no variant `Squareee` println!("My shape is {:?}", Shape::Circl { size: 5}); //~ ERROR no variant `Circl` println!("My shape is {:?}", Shape::Rombus{ size: 5}); //~ ERROR no variant `Rombus` + Shape::Squareee; //~ ERROR no variant + Shape::Circl; //~ ERROR no variant + Shape::Rombus; //~ ERROR no variant } diff --git a/src/test/ui/suggestions/suggest-variants.stderr b/src/test/ui/suggestions/suggest-variants.stderr index 08ae68ea71302..ef0ba70c34066 100644 --- a/src/test/ui/suggestions/suggest-variants.stderr +++ b/src/test/ui/suggestions/suggest-variants.stderr @@ -1,20 +1,65 @@ -error: no variant `Squareee` on enum `Shape` - --> $DIR/suggest-variants.rs:12:34 +error: no variant `Squareee` in enum `Shape` + --> $DIR/suggest-variants.rs:12:41 | +LL | enum Shape { + | ---------- variant `Squareee` not found here +... LL | println!("My shape is {:?}", Shape::Squareee { size: 5}); - | ^^^^^^^^^^^^^^^ help: did you mean: `Shape::Square` + | ^^^^^^^^ help: there is a variant with a similar name: `Square` -error: no variant `Circl` on enum `Shape` - --> $DIR/suggest-variants.rs:13:34 +error: no variant `Circl` in enum `Shape` + --> $DIR/suggest-variants.rs:13:41 | +LL | enum Shape { + | ---------- variant `Circl` not found here +... LL | println!("My shape is {:?}", Shape::Circl { size: 5}); - | ^^^^^^^^^^^^ help: did you mean: `Shape::Circle` + | ^^^^^ help: there is a variant with a similar name: `Circle` -error: no variant `Rombus` on enum `Shape` - --> $DIR/suggest-variants.rs:14:34 +error: no variant `Rombus` in enum `Shape` + --> $DIR/suggest-variants.rs:14:41 | +LL | enum Shape { + | ---------- variant `Rombus` not found here +... LL | println!("My shape is {:?}", Shape::Rombus{ size: 5}); - | ^^^^^^^^^^^^^ unknown variant + | -------^^^^^^ + | | + | variant not found in `Shape` -error: aborting due to 3 previous errors +error[E0599]: no variant or associated item named `Squareee` found for type `Shape` in the current scope + --> $DIR/suggest-variants.rs:15:12 + | +LL | enum Shape { + | ---------- variant or associated item `Squareee` not found here +... 
+LL |     Shape::Squareee;
+   |            ^^^^^^^^
+   |            |
+   |            variant or associated item not found in `Shape`
+   |            help: there is a variant with a similar name: `Square`
+
+error[E0599]: no variant or associated item named `Circl` found for type `Shape` in the current scope
+  --> $DIR/suggest-variants.rs:16:12
+   |
+LL | enum Shape {
+   | ---------- variant or associated item `Circl` not found here
+...
+LL |     Shape::Circl;
+   |            ^^^^^
+   |            |
+   |            variant or associated item not found in `Shape`
+   |            help: there is a variant with a similar name: `Circle`
+
+error[E0599]: no variant or associated item named `Rombus` found for type `Shape` in the current scope
+  --> $DIR/suggest-variants.rs:17:12
+   |
+LL | enum Shape {
+   | ---------- variant or associated item `Rombus` not found here
+...
+LL |     Shape::Rombus;
+   |            ^^^^^^ variant or associated item not found in `Shape`
+
+error: aborting due to 6 previous errors
 
+For more information about this error, try `rustc --explain E0599`.
diff --git a/src/tools/compiletest/src/header.rs b/src/tools/compiletest/src/header.rs
index d735d3351e666..64882c603bad3 100644
--- a/src/tools/compiletest/src/header.rs
+++ b/src/tools/compiletest/src/header.rs
@@ -286,6 +286,10 @@ pub struct TestProps {
     // directory as the test, but for backwards compatibility reasons
     // we also check the auxiliary directory)
     pub aux_builds: Vec<String>,
+    // A list of crates to pass '--extern-private name:PATH' flags for
+    // This should be a subset of 'aux_build'
+    // FIXME: Replace this with a better solution: https://github.com/rust-lang/rust/pull/54020
+    pub extern_private: Vec<String>,
     // Environment settings to use for compiling
     pub rustc_env: Vec<(String, String)>,
     // Environment settings to use during execution
@@ -303,6 +307,10 @@ pub struct TestProps {
     // For UI tests, allows compiler to generate arbitrary output to stderr
     pub dont_check_compiler_stderr: bool,
     // Don't force a --crate-type=dylib flag on the command line
+    //
+    // Set this for example if you have an auxiliary test file that contains
+    // a proc-macro and needs `#![crate_type = "proc-macro"]`. This ensures
+    // that the aux file is compiled as a `proc-macro` and not as a `dylib`.
     pub no_prefer_dynamic: bool,
     // Run --pretty expanded when running pretty printing tests
     pub pretty_expanded: bool,
@@ -349,6 +357,7 @@ impl TestProps {
             run_flags: None,
             pp_exact: None,
             aux_builds: vec![],
+            extern_private: vec![],
             revisions: vec![],
             rustc_env: vec![],
             exec_env: vec![],
@@ -465,6 +474,10 @@ impl TestProps {
                 self.aux_builds.push(ab);
            }
 
+            if let Some(ep) = config.parse_extern_private(ln) {
+                self.extern_private.push(ep);
+            }
+
             if let Some(ee) = config.parse_env(ln, "exec-env") {
                 self.exec_env.push(ee);
             }
@@ -490,7 +503,7 @@ impl TestProps {
             }
 
             if !self.compile_pass {
-                // run-pass implies must_compile_successfully
+                // run-pass implies compile_pass
                 self.compile_pass = config.parse_compile_pass(ln) || self.run_pass;
             }
 
@@ -606,6 +619,10 @@ impl Config {
             .map(|r| r.trim().to_string())
     }
 
+    fn parse_extern_private(&self, line: &str) -> Option<String> {
+        self.parse_name_value_directive(line, "extern-private")
+    }
+
     fn parse_compile_flags(&self, line: &str) -> Option<String> {
         self.parse_name_value_directive(line, "compile-flags")
     }
diff --git a/src/tools/compiletest/src/runtest.rs b/src/tools/compiletest/src/runtest.rs
index 2021dd513aa62..cec1d83eb0262 100644
--- a/src/tools/compiletest/src/runtest.rs
+++ b/src/tools/compiletest/src/runtest.rs
@@ -74,6 +74,17 @@ pub fn dylib_env_var() -> &'static str {
     }
 }
 
+/// The platform-specific library file extension
+pub fn lib_extension() -> &'static str {
+    if cfg!(windows) {
+        ".dll"
+    } else if cfg!(target_os = "macos") {
+        ".dylib"
+    } else {
+        ".so"
+    }
+}
+
 #[derive(Debug, PartialEq)]
 pub enum DiffLine {
     Context(String),
@@ -1585,6 +1596,13 @@ impl<'test> TestCx<'test> {
             create_dir_all(&aux_dir).unwrap();
         }
 
+        for priv_dep in &self.props.extern_private {
+            let lib_name = format!("lib{}{}", priv_dep, lib_extension());
+            rustc
+                .arg("--extern-private")
+                .arg(format!("{}={}", priv_dep, aux_dir.join(lib_name).to_str().unwrap()));
+        }
+
        for rel_ab in &self.props.aux_builds {
            let aux_testpaths = self.compute_aux_test_paths(rel_ab);
            let aux_props =
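
For context on the compiletest changes above: the new `extern-private` directive is a `name:value` header comment (as used in pub-priv1.rs), and `parse_extern_private` simply reads its value via `parse_name_value_directive`. The following is a minimal, self-contained sketch of that kind of directive parsing; the helper below and its exact trimming rules are illustrative assumptions, not the actual compiletest implementation.

// Sketch of parsing a `// name:value` test header directive such as
// `// extern-private:priv_dep`. The function name mirrors the compiletest
// helper used in the diff, but the body here is a simplified stand-in.
fn parse_name_value_directive(line: &str, directive: &str) -> Option<String> {
    let prefix = format!("{}:", directive);
    line.trim_start_matches("//")
        .trim()
        .strip_prefix(&prefix)
        .map(|value| value.trim().to_string())
}

fn main() {
    // This is the header form used by pub-priv1.rs in the diff above.
    let header = "// extern-private:priv_dep";
    assert_eq!(
        parse_name_value_directive(header, "extern-private"),
        Some("priv_dep".to_string())
    );
}

Per the runtest.rs hunk, the parsed crate name is then passed to rustc as `--extern-private priv_dep=<aux dir>/libpriv_dep<extension>`, with the extension chosen by the new `lib_extension()` helper.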