diff --git a/src/librustc/hir/map/def_collector.rs b/src/librustc/hir/map/def_collector.rs
index 1997e2aab35e8..ca6b2d03001ce 100644
--- a/src/librustc/hir/map/def_collector.rs
+++ b/src/librustc/hir/map/def_collector.rs
@@ -90,7 +90,7 @@ impl<'a> DefCollector<'a> {
         }
     }

-    pub fn visit_macro_invoc(&mut self, id: NodeId) {
+    fn visit_macro_invoc(&mut self, id: NodeId) {
         self.definitions.set_invocation_parent(id.placeholder_to_expn_id(), self.parent_def);
     }
 }
diff --git a/src/librustc/infer/lexical_region_resolve/mod.rs b/src/librustc/infer/lexical_region_resolve/mod.rs
index f11f94c428e86..6f55ade1e8612 100644
--- a/src/librustc/infer/lexical_region_resolve/mod.rs
+++ b/src/librustc/infer/lexical_region_resolve/mod.rs
@@ -304,7 +304,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
     }

     fn expansion(&self, var_values: &mut LexicalRegionResolutions<'tcx>) {
-        self.iterate_until_fixed_point("Expansion", |constraint| {
+        self.iterate_until_fixed_point(|constraint| {
             debug!("expansion: constraint={:?}", constraint);
             let (a_region, b_vid, b_data, retain) = match *constraint {
                 Constraint::RegSubVar(a_region, b_vid) => {
@@ -360,13 +360,21 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
         match *b_data {
             VarValue::Value(cur_region) => {
                 // Identical scopes can show up quite often, if the fixed point
-                // iteration converges slowly, skip them
+                // iteration converges slowly. Skip them. This is purely an
+                // optimization.
                 if let (ReScope(a_scope), ReScope(cur_scope)) = (a_region, cur_region) {
                     if a_scope == cur_scope {
                         return false;
                     }
                 }

+                // This is a specialized version of the `lub_concrete_regions`
+                // check below for a common case, here purely as an
+                // optimization.
+                if let ReEmpty = a_region {
+                    return false;
+                }
+
                 let mut lub = self.lub_concrete_regions(a_region, cur_region);
                 if lub == cur_region {
                     return false;
@@ -407,8 +415,6 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {

     /// Returns the smallest region `c` such that `a <= c` and `b <= c`.
     fn lub_concrete_regions(&self, a: Region<'tcx>, b: Region<'tcx>) -> Region<'tcx> {
-        let tcx = self.tcx();
-
         match (a, b) {
             (&ty::ReClosureBound(..), _)
             | (_, &ty::ReClosureBound(..))
@@ -468,7 +474,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {

                 // otherwise, we don't know what the free region is,
                 // so we must conservatively say the LUB is static:
-                tcx.lifetimes.re_static
+                self.tcx().lifetimes.re_static
             }

             (&ReScope(a_id), &ReScope(b_id)) => {
@@ -476,7 +482,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
                 // subtype of the region corresponding to an inner
                 // block.
                 let lub = self.region_rels.region_scope_tree.nearest_common_ancestor(a_id, b_id);
-                tcx.mk_region(ReScope(lub))
+                self.tcx().mk_region(ReScope(lub))
             }

             (&ReEarlyBound(_), &ReEarlyBound(_))
@@ -490,7 +496,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
                 if a == b {
                     a
                 } else {
-                    tcx.lifetimes.re_static
+                    self.tcx().lifetimes.re_static
                 }
             }
         }
@@ -860,7 +866,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
         }
     }

-    fn iterate_until_fixed_point<F>(&self, tag: &str, mut body: F)
+    fn iterate_until_fixed_point<F>(&self, mut body: F)
     where
         F: FnMut(&Constraint<'tcx>) -> (bool, bool),
     {
@@ -870,7 +876,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
         while changed {
             changed = false;
             iteration += 1;
-            debug!("---- {} Iteration {}{}", "#", tag, iteration);
+            debug!("---- Expansion iteration {}", iteration);
             constraints.retain(|constraint| {
                 let (edge_changed, retain) = body(constraint);
                 if edge_changed {
@@ -880,7 +886,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
                 retain
             });
         }
-        debug!("---- {} Complete after {} iteration(s)", tag, iteration);
+        debug!("---- Expansion complete after {} iteration(s)", iteration);
     }

     fn bound_is_met(
diff --git a/src/librustc/mir/interpret/error.rs b/src/librustc/mir/interpret/error.rs
index 71967b513a049..ac99ccd45eafe 100644
--- a/src/librustc/mir/interpret/error.rs
+++ b/src/librustc/mir/interpret/error.rs
@@ -389,10 +389,6 @@ pub enum UnsupportedOpInfo<'tcx> {
     /// Free-form case. Only for errors that are never caught!
     Unsupported(String),

-    /// FIXME(#64506) Error used to work around accessing projections of
-    /// uninhabited types.
-    UninhabitedValue,
-
     // -- Everything below is not categorized yet --
     FunctionAbiMismatch(Abi, Abi),
     FunctionArgMismatch(Ty<'tcx>, Ty<'tcx>),
@@ -556,8 +552,6 @@ impl fmt::Debug for UnsupportedOpInfo<'tcx> {
                        not a power of two"),
             Unsupported(ref msg) =>
                 write!(f, "{}", msg),
-            UninhabitedValue =>
-                write!(f, "tried to use an uninhabited value"),
         }
     }
 }
diff --git a/src/librustc/ty/layout.rs b/src/librustc/ty/layout.rs
index ce7e1822d9ab5..aed9e87a168ce 100644
--- a/src/librustc/ty/layout.rs
+++ b/src/librustc/ty/layout.rs
@@ -824,10 +824,14 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
                 });
                 (present_variants.next(), present_variants.next())
             };
-            if present_first.is_none() {
+            let present_first = match present_first {
+                present_first @ Some(_) => present_first,
                 // Uninhabited because it has no variants, or only absent ones.
-                return tcx.layout_raw(param_env.and(tcx.types.never));
-            }
+                None if def.is_enum() => return tcx.layout_raw(param_env.and(tcx.types.never)),
+                // If it's a struct, still compute a layout so that we can still compute the
+                // field offsets.
+                None => Some(VariantIdx::new(0)),
+            };

             let is_struct = !def.is_enum() ||
                 // Only one variant is present.
diff --git a/src/librustc_mir/interpret/place.rs b/src/librustc_mir/interpret/place.rs
index 3ba989529f18f..903eb3c1c44b9 100644
--- a/src/librustc_mir/interpret/place.rs
+++ b/src/librustc_mir/interpret/place.rs
@@ -9,7 +9,7 @@ use rustc::mir;
 use rustc::mir::interpret::truncate;
 use rustc::ty::{self, Ty};
 use rustc::ty::layout::{
-    self, Size, Abi, Align, LayoutOf, TyLayout, HasDataLayout, VariantIdx, PrimitiveExt
+    self, Size, Align, LayoutOf, TyLayout, HasDataLayout, VariantIdx, PrimitiveExt
 };
 use rustc::ty::TypeFoldable;

@@ -377,20 +377,17 @@ where
             layout::FieldPlacement::Array { stride, .. } => {
                 let len = base.len(self)?;
                 if field >= len {
-                    // This can be violated because this runs during promotion on code where the
-                    // type system has not yet ensured that such things don't happen.
+                    // This can be violated because the index (field) can be a runtime value
+                    // provided by the user.
                     debug!("tried to access element {} of array/slice with length {}", field, len);
                     throw_panic!(BoundsCheck { len, index: field });
                 }
                 stride * field
             }
             layout::FieldPlacement::Union(count) => {
-                // FIXME(#64506) `UninhabitedValue` can be removed when this issue is resolved
-                if base.layout.abi == Abi::Uninhabited {
-                    throw_unsup!(UninhabitedValue);
-                }
                 assert!(field < count as u64,
-                        "Tried to access field {} of union with {} fields", field, count);
+                        "Tried to access field {} of union {:#?} with {} fields",
+                        field, base.layout, count);
                 // Offset is always 0
                 Size::from_bytes(0)
             }
diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs
index 2b8965aa5d41c..66f04a2164ebd 100644
--- a/src/librustc_resolve/build_reduced_graph.rs
+++ b/src/librustc_resolve/build_reduced_graph.rs
@@ -163,25 +163,15 @@ impl<'a> Resolver<'a> {
         Some(ext)
     }

-    // FIXME: `extra_placeholders` should be included into the `fragment` as regular placeholders.
     crate fn build_reduced_graph(
         &mut self,
         fragment: &AstFragment,
-        extra_placeholders: &[NodeId],
         parent_scope: ParentScope<'a>,
     ) -> LegacyScope<'a> {
         let mut def_collector = DefCollector::new(&mut self.definitions, parent_scope.expansion);
         fragment.visit_with(&mut def_collector);
-        for placeholder in extra_placeholders {
-            def_collector.visit_macro_invoc(*placeholder);
-        }
-
         let mut visitor = BuildReducedGraphVisitor { r: self, parent_scope };
         fragment.visit_with(&mut visitor);
-        for placeholder in extra_placeholders {
-            visitor.parent_scope.legacy = visitor.visit_invoc(*placeholder);
-        }
-
         visitor.parent_scope.legacy
     }

@@ -1064,8 +1054,17 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
         None
     }

+    // Mark the given macro as unused unless its name starts with `_`.
+    // Macro uses will remove items from this set, and the remaining
+    // items will be reported as `unused_macros`.
+    fn insert_unused_macro(&mut self, ident: Ident, node_id: NodeId, span: Span) {
+        if !ident.as_str().starts_with("_") {
+            self.r.unused_macros.insert(node_id, span);
+        }
+    }
+
     fn define_macro(&mut self, item: &ast::Item) -> LegacyScope<'a> {
-        let parent_scope = &self.parent_scope;
+        let parent_scope = self.parent_scope;
         let expansion = parent_scope.expansion;
         let (ext, ident, span, is_legacy) = match &item.kind {
             ItemKind::MacroDef(def) => {
@@ -1105,7 +1104,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
                               (res, vis, span, expansion, IsMacroExport));
             } else {
                 self.r.check_reserved_macro_name(ident, res);
-                self.r.unused_macros.insert(item.id, span);
+                self.insert_unused_macro(ident, item.id, span);
             }
             LegacyScope::Binding(self.r.arenas.alloc_legacy_binding(LegacyBinding {
                 parent_legacy_scope: parent_scope.legacy, binding, ident
@@ -1114,7 +1113,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
             let module = parent_scope.module;
             let vis = self.resolve_visibility(&item.vis);
             if vis != ty::Visibility::Public {
-                self.r.unused_macros.insert(item.id, span);
+                self.insert_unused_macro(ident, item.id, span);
             }
             self.r.define(module, ident, MacroNS, (res, vis, span, expansion));
             self.parent_scope.legacy
diff --git a/src/librustc_resolve/macros.rs b/src/librustc_resolve/macros.rs
index 49186088fa06a..e9975f7647a9f 100644
--- a/src/librustc_resolve/macros.rs
+++ b/src/librustc_resolve/macros.rs
@@ -108,15 +108,11 @@ impl<'a> base::Resolver for Resolver<'a> {
         });
     }

-    // FIXME: `extra_placeholders` should be included into the `fragment` as regular placeholders.
-    fn visit_ast_fragment_with_placeholders(
-        &mut self, expansion: ExpnId, fragment: &AstFragment, extra_placeholders: &[NodeId]
-    ) {
+    fn visit_ast_fragment_with_placeholders(&mut self, expansion: ExpnId, fragment: &AstFragment) {
         // Integrate the new AST fragment into all the definition and module structures.
         // We are inside the `expansion` now, but other parent scope components are still the same.
         let parent_scope = ParentScope { expansion, ..self.invocation_parent_scopes[&expansion] };
-        let output_legacy_scope =
-            self.build_reduced_graph(fragment, extra_placeholders, parent_scope);
+        let output_legacy_scope = self.build_reduced_graph(fragment, parent_scope);
         self.output_legacy_scopes.insert(expansion, output_legacy_scope);

         parent_scope.module.unexpanded_invocations.borrow_mut().remove(&expansion);
diff --git a/src/librustc_target/abi/mod.rs b/src/librustc_target/abi/mod.rs
index 26d37f196befa..fde5c5bed4d91 100644
--- a/src/librustc_target/abi/mod.rs
+++ b/src/librustc_target/abi/mod.rs
@@ -738,7 +738,11 @@ impl FieldPlacement {

     pub fn offset(&self, i: usize) -> Size {
         match *self {
-            FieldPlacement::Union(_) => Size::ZERO,
+            FieldPlacement::Union(count) => {
+                assert!(i < count,
+                        "Tried to access field {} of union with {} fields", i, count);
+                Size::ZERO
+            },
             FieldPlacement::Array { stride, count } => {
                 let i = i as u64;
                 assert!(i < count);
diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs
index 7bef693a5be4c..c2c883fd20e7a 100644
--- a/src/libsyntax/attr/mod.rs
+++ b/src/libsyntax/attr/mod.rs
@@ -551,7 +551,7 @@ impl MetaItem {
 impl MetaItemKind {
     pub fn tokens(&self, span: Span) -> TokenStream {
         match *self {
-            MetaItemKind::Word => TokenStream::empty(),
+            MetaItemKind::Word => TokenStream::default(),
             MetaItemKind::NameValue(ref lit) => {
                 let mut vec = vec![TokenTree::token(token::Eq, span).into()];
                 lit.tokens().append_to_tree_and_joint_vec(&mut vec);
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index 583fb3f770183..0bf21c45a347c 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -849,8 +849,7 @@ pub trait Resolver {
     fn next_node_id(&mut self) -> NodeId;

     fn resolve_dollar_crates(&mut self);
-    fn visit_ast_fragment_with_placeholders(&mut self, expn_id: ExpnId, fragment: &AstFragment,
-                                            extra_placeholders: &[NodeId]);
+    fn visit_ast_fragment_with_placeholders(&mut self, expn_id: ExpnId, fragment: &AstFragment);
     fn register_builtin_macro(&mut self, ident: ast::Ident, ext: SyntaxExtension);

     fn expansion_for_ast_pass(
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index 1e44f3dd5e59a..bbcd55b0cdb76 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -24,7 +24,6 @@ use errors::{Applicability, FatalError};
 use smallvec::{smallvec, SmallVec};
 use syntax_pos::{Span, DUMMY_SP, FileName};

-use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::Lrc;
 use std::io::ErrorKind;
 use std::{iter, mem, slice};
@@ -73,6 +72,22 @@ macro_rules! ast_fragments {
         }

         impl AstFragment {
+            pub fn add_placeholders(&mut self, placeholders: &[NodeId]) {
+                if placeholders.is_empty() {
+                    return;
+                }
+                match self {
+                    $($(AstFragment::$Kind(ast) => ast.extend(placeholders.iter().flat_map(|id| {
+                        // We are repeating through arguments with `many`, to do that we have to
+                        // mention some macro variable from those arguments even if it's not used.
+                        #[cfg_attr(bootstrap, allow(unused_macros))]
+                        macro _repeating($flat_map_ast_elt) {}
+                        placeholder(AstFragmentKind::$Kind, *id).$make_ast()
+                    })),)?)*
+                    _ => panic!("unexpected AST fragment kind")
+                }
+            }
+
             pub fn make_opt_expr(self) -> Option<P<ast::Expr>> {
                 match self {
                     AstFragment::OptExpr(expr) => expr,
@@ -340,7 +355,6 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         // Unresolved macros produce dummy outputs as a recovery measure.
         invocations.reverse();
         let mut expanded_fragments = Vec::new();
-        let mut all_derive_placeholders: FxHashMap<ExpnId, Vec<NodeId>> = FxHashMap::default();
         let mut undetermined_invocations = Vec::new();
         let (mut progress, mut force) = (false, !self.monotonic);
         loop {
@@ -418,9 +432,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                     self.cx.resolver.add_derives(invoc.expansion_data.id, SpecialDerives::COPY);
                 }

-                let derive_placeholders =
-                    all_derive_placeholders.entry(invoc.expansion_data.id).or_default();
-                derive_placeholders.reserve(derives.len());
+                let mut derive_placeholders = Vec::with_capacity(derives.len());
                 invocations.reserve(derives.len());
                 for path in derives {
                     let expn_id = ExpnId::fresh(None);
@@ -436,7 +448,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 }
                 let fragment = invoc.fragment_kind
                     .expect_from_annotatables(::std::iter::once(item));
-                self.collect_invocations(fragment, derive_placeholders)
+                self.collect_invocations(fragment, &derive_placeholders)
             }
         };

@@ -455,10 +467,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         let mut placeholder_expander = PlaceholderExpander::new(self.cx, self.monotonic);
         while let Some(expanded_fragments) = expanded_fragments.pop() {
             for (expn_id, expanded_fragment) in expanded_fragments.into_iter().rev() {
-                let derive_placeholders =
-                    all_derive_placeholders.remove(&expn_id).unwrap_or_else(Vec::new);
                 placeholder_expander.add(NodeId::placeholder_from_expn_id(expn_id),
-                                         expanded_fragment, derive_placeholders);
+                                         expanded_fragment);
             }
         }
         fragment_with_placeholders.mut_visit_with(&mut placeholder_expander);
@@ -491,13 +501,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 monotonic: self.monotonic,
             };
             fragment.mut_visit_with(&mut collector);
+            fragment.add_placeholders(extra_placeholders);
             collector.invocations
         };

-        // FIXME: Merge `extra_placeholders` into the `fragment` as regular placeholders.
         if self.monotonic {
             self.cx.resolver.visit_ast_fragment_with_placeholders(
-                self.cx.current_expansion.id, &fragment, extra_placeholders);
+                self.cx.current_expansion.id, &fragment
+            );
         }

         (fragment, invocations)
@@ -676,12 +687,12 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 }
             }
             Some(TokenTree::Token(..)) => {}
-            None => return TokenStream::empty(),
+            None => return TokenStream::default(),
         }
         self.cx.span_err(span, "custom attribute invocations must be \
             of the form `#[foo]` or `#[foo(..)]`, the macro name must only be \
             followed by a delimiter token");
-        TokenStream::empty()
+        TokenStream::default()
     }

     fn gate_proc_macro_attr_item(&self, span: Span, item: &Annotatable) {
diff --git a/src/libsyntax/ext/mbe/transcribe.rs b/src/libsyntax/ext/mbe/transcribe.rs
index ba818ebd35c7f..da930436d817b 100644
--- a/src/libsyntax/ext/mbe/transcribe.rs
+++ b/src/libsyntax/ext/mbe/transcribe.rs
@@ -95,7 +95,7 @@ pub(super) fn transcribe(
 ) -> TokenStream {
     // Nothing for us to transcribe...
     if src.is_empty() {
-        return TokenStream::empty();
+        return TokenStream::default();
     }

     // We descend into the RHS (`src`), expanding things as we go. This stack contains the things
diff --git a/src/libsyntax/ext/placeholders.rs b/src/libsyntax/ext/placeholders.rs
index 8eecef1020d0a..74b2fb31e45a5 100644
--- a/src/libsyntax/ext/placeholders.rs
+++ b/src/libsyntax/ext/placeholders.rs
@@ -1,4 +1,4 @@
-use crate::ast::{self, NodeId};
+use crate::ast;
 use crate::source_map::{DUMMY_SP, dummy_spanned};
 use crate::ext::base::ExtCtxt;
 use crate::ext::expand::{AstFragment, AstFragmentKind};
@@ -15,7 +15,7 @@ pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
     fn mac_placeholder() -> ast::Mac {
         ast::Mac {
             path: ast::Path { span: DUMMY_SP, segments: Vec::new() },
-            tts: TokenStream::empty().into(),
+            tts: TokenStream::default().into(),
             delim: ast::MacDelimiter::Brace,
             span: DUMMY_SP,
             prior_type_ascription: None,
@@ -32,12 +32,12 @@ pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
         attrs: ThinVec::new(),
         kind: ast::ExprKind::Mac(mac_placeholder()),
     });
-    let ty = P(ast::Ty {
+    let ty = || P(ast::Ty {
         id,
         kind: ast::TyKind::Mac(mac_placeholder()),
         span,
     });
-    let pat = P(ast::Pat {
+    let pat = || P(ast::Pat {
         id,
         kind: ast::PatKind::Mac(mac_placeholder()),
         span,
@@ -83,7 +83,7 @@ pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
                 body: expr_placeholder(),
                 guard: None,
                 id,
-                pat,
+                pat: pat(),
                 span,
                 is_placeholder: true,
             }
@@ -105,7 +105,7 @@ pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
                 id,
                 ident,
                 is_shorthand: false,
-                pat,
+                pat: pat(),
                 span,
                 is_placeholder: true,
             }
@@ -124,9 +124,9 @@ pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
             ast::Param {
                 attrs: Default::default(),
                 id,
-                pat,
+                pat: pat(),
                 span,
-                ty,
+                ty: ty(),
                 is_placeholder: true,
             }
         ]),
@@ -136,7 +136,7 @@ pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
                 id,
                 ident: None,
                 span,
-                ty,
+                ty: ty(),
                 vis,
                 is_placeholder: true,
             }
@@ -170,17 +170,8 @@ impl<'a, 'b> PlaceholderExpander<'a, 'b> {
         }
     }

-    pub fn add(&mut self, id: ast::NodeId, mut fragment: AstFragment, placeholders: Vec<NodeId>) {
+    pub fn add(&mut self, id: ast::NodeId, mut fragment: AstFragment) {
         fragment.mut_visit_with(self);
-        if let AstFragment::Items(mut items) = fragment {
-            for placeholder in placeholders {
-                match self.remove(placeholder) {
-                    AstFragment::Items(derived_items) => items.extend(derived_items),
-                    _ => unreachable!(),
-                }
-            }
-            fragment = AstFragment::Items(items);
-        }
         self.expanded_fragments.insert(id, fragment);
     }

diff --git a/src/libsyntax/ext/proc_macro_server.rs b/src/libsyntax/ext/proc_macro_server.rs
index 65037c4a0b8ba..08142ba6c58c5 100644
--- a/src/libsyntax/ext/proc_macro_server.rs
+++ b/src/libsyntax/ext/proc_macro_server.rs
@@ -394,7 +394,7 @@ impl server::Types for Rustc<'_> {

 impl server::TokenStream for Rustc<'_> {
     fn new(&mut self) -> Self::TokenStream {
-        TokenStream::empty()
+        TokenStream::default()
     }
     fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
         stream.is_empty()
diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs
index 09a47795a822a..52c444069a4aa 100644
--- a/src/libsyntax/lib.rs
+++ b/src/libsyntax/lib.rs
@@ -11,6 +11,7 @@
 #![feature(const_fn)]
 #![feature(const_transmute)]
 #![feature(crate_visibility_modifier)]
+#![feature(decl_macro)]
 #![feature(label_break_value)]
 #![feature(nll)]
 #![feature(proc_macro_diagnostic)]
diff --git a/src/libsyntax/mut_visit.rs b/src/libsyntax/mut_visit.rs
index 3923b9f297b9f..60ee17d09b755 100644
--- a/src/libsyntax/mut_visit.rs
+++ b/src/libsyntax/mut_visit.rs
@@ -610,10 +610,8 @@ pub fn noop_visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
 }

 pub fn noop_visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &mut T) {
-    visit_opt(tts, |tts| {
-        let tts = Lrc::make_mut(tts);
-        visit_vec(tts, |(tree, _is_joint)| vis.visit_tt(tree));
-    })
+    let tts = Lrc::make_mut(tts);
+    visit_vec(tts, |(tree, _is_joint)| vis.visit_tt(tree));
 }

 // Applies ident visitor if it's an ident; applies other visits to interpolated nodes.
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index e74f3045db804..0963efcfc8ac0 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -203,7 +203,7 @@ impl<'a> Parser<'a> {
             };
             TokenStream::from_streams(smallvec![eq.into(), tokens])
         } else {
-            TokenStream::empty()
+            TokenStream::default()
         };
         ast::AttrItem { path, tokens }
     })
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 400bf0f3856aa..478cfefc2247b 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -1273,7 +1273,7 @@ impl<'a> Parser<'a> {
             // This can happen due to a bad interaction of two unrelated recovery mechanisms with
             // mismatched delimiters *and* recovery lookahead on the likely typo `pub ident(`
             // (#62881).
-            return Ok((ret?, TokenStream::new(vec![])));
+            return Ok((ret?, TokenStream::default()));
         } else {
             &mut self.token_cursor.stack[prev].last_token
         };
@@ -1288,7 +1288,7 @@ impl<'a> Parser<'a> {
                 // This can happen due to a bad interaction of two unrelated recovery mechanisms
                 // with mismatched delimiters *and* recovery lookahead on the likely typo
                 // `pub ident(` (#62895, different but similar to the case above).
-                return Ok((ret?, TokenStream::new(vec![])));
+                return Ok((ret?, TokenStream::default()));
             }
         };

diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index 970bacdde13a7..0ff1c26bac2b2 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -136,13 +136,8 @@ impl TokenTree {
 /// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s
 /// instead of a representation of the abstract syntax tree.
 /// Today's `TokenTree`s can still contain AST via `token::Interpolated` for back-compat.
-///
-/// The use of `Option` is an optimization that avoids the need for an
-/// allocation when the stream is empty. However, it is not guaranteed that an
-/// empty stream is represented with `None`; it may be represented as a `Some`
-/// around an empty `Vec`.
-#[derive(Clone, Debug)]
-pub struct TokenStream(pub Option<Lrc<Vec<TreeAndJoint>>>);
+#[derive(Clone, Debug, Default)]
+pub struct TokenStream(pub Lrc<Vec<TreeAndJoint>>);

 pub type TreeAndJoint = (TokenTree, IsJoint);

@@ -163,36 +158,34 @@ impl TokenStream {
     /// separating the two arguments with a comma for diagnostic suggestions.
     pub(crate) fn add_comma(&self) -> Option<(TokenStream, Span)> {
         // Used to suggest if a user writes `foo!(a b);`
-        if let Some(ref stream) = self.0 {
-            let mut suggestion = None;
-            let mut iter = stream.iter().enumerate().peekable();
-            while let Some((pos, ts)) = iter.next() {
-                if let Some((_, next)) = iter.peek() {
-                    let sp = match (&ts, &next) {
-                        (_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
-                        ((TokenTree::Token(token_left), NonJoint),
-                         (TokenTree::Token(token_right), _))
-                        if ((token_left.is_ident() && !token_left.is_reserved_ident())
-                            || token_left.is_lit()) &&
-                            ((token_right.is_ident() && !token_right.is_reserved_ident())
-                            || token_right.is_lit()) => token_left.span,
-                        ((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(),
-                        _ => continue,
-                    };
-                    let sp = sp.shrink_to_hi();
-                    let comma = (TokenTree::token(token::Comma, sp), NonJoint);
-                    suggestion = Some((pos, comma, sp));
-                }
-            }
-            if let Some((pos, comma, sp)) = suggestion {
-                let mut new_stream = vec![];
-                let parts = stream.split_at(pos + 1);
-                new_stream.extend_from_slice(parts.0);
-                new_stream.push(comma);
-                new_stream.extend_from_slice(parts.1);
-                return Some((TokenStream::new(new_stream), sp));
-            }
-        }
+        let mut suggestion = None;
+        let mut iter = self.0.iter().enumerate().peekable();
+        while let Some((pos, ts)) = iter.next() {
+            if let Some((_, next)) = iter.peek() {
+                let sp = match (&ts, &next) {
+                    (_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
+                    ((TokenTree::Token(token_left), NonJoint),
+                     (TokenTree::Token(token_right), _))
+                    if ((token_left.is_ident() && !token_left.is_reserved_ident())
+                        || token_left.is_lit()) &&
+                        ((token_right.is_ident() && !token_right.is_reserved_ident())
+                        || token_right.is_lit()) => token_left.span,
+                    ((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(),
+                    _ => continue,
+                };
+                let sp = sp.shrink_to_hi();
+                let comma = (TokenTree::token(token::Comma, sp), NonJoint);
+                suggestion = Some((pos, comma, sp));
+            }
+        }
+        if let Some((pos, comma, sp)) = suggestion {
+            let mut new_stream = vec![];
+            let parts = self.0.split_at(pos + 1);
+            new_stream.extend_from_slice(parts.0);
+            new_stream.push(comma);
+            new_stream.extend_from_slice(parts.1);
+            return Some((TokenStream::new(new_stream), sp));
+        }
         None
     }
 }
@@ -224,28 +217,21 @@ impl PartialEq<TokenStream> for TokenStream {
 }

 impl TokenStream {
-    pub fn len(&self) -> usize {
-        if let Some(ref slice) = self.0 {
-            slice.len()
-        } else {
-            0
-        }
+    pub fn new(streams: Vec<TreeAndJoint>) -> TokenStream {
+        TokenStream(Lrc::new(streams))
     }

-    pub fn empty() -> TokenStream {
-        TokenStream(None)
+    pub fn is_empty(&self) -> bool {
+        self.0.is_empty()
     }

-    pub fn is_empty(&self) -> bool {
-        match self.0 {
-            None => true,
-            Some(ref stream) => stream.is_empty(),
-        }
+    pub fn len(&self) -> usize {
+        self.0.len()
     }

     pub(crate) fn from_streams(mut streams: SmallVec<[TokenStream; 2]>) -> TokenStream {
         match streams.len() {
-            0 => TokenStream::empty(),
+            0 => TokenStream::default(),
             1 => streams.pop().unwrap(),
             _ => {
                 // We are going to extend the first stream in `streams` with
                 // Get the first stream. If it's `None`, create an empty
                 // stream.
                 let mut iter = streams.drain();
-                let mut first_stream_lrc = match iter.next().unwrap().0 {
-                    Some(first_stream_lrc) => first_stream_lrc,
-                    None => Lrc::new(vec![]),
-                };
+                let mut first_stream_lrc = iter.next().unwrap().0;

                 // Append the elements to the first stream, after reserving
                 // space for them.
                 let first_vec_mut = Lrc::make_mut(&mut first_stream_lrc);
                 first_vec_mut.reserve(num_appends);
                 for stream in iter {
-                    if let Some(stream) = stream.0 {
-                        first_vec_mut.extend(stream.iter().cloned());
-                    }
+                    first_vec_mut.extend(stream.0.iter().cloned());
                 }

                 // Create the final `TokenStream`.
-                match first_vec_mut.len() {
-                    0 => TokenStream(None),
-                    _ => TokenStream(Some(first_stream_lrc)),
-                }
+                TokenStream(first_stream_lrc)
             }
         }
     }

-    pub fn new(streams: Vec<TreeAndJoint>) -> TokenStream {
-        match streams.len() {
-            0 => TokenStream(None),
-            _ => TokenStream(Some(Lrc::new(streams))),
-        }
-    }
-
     pub fn append_to_tree_and_joint_vec(self, vec: &mut Vec<TreeAndJoint>) {
-        if let Some(stream) = self.0 {
-            vec.extend(stream.iter().cloned());
-        }
+        vec.extend(self.0.iter().cloned());
     }

     pub fn trees(&self) -> Cursor {
@@ -370,24 +339,22 @@ impl TokenStream {
     }

     pub fn map_enumerated<F: FnMut(usize, TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
-        TokenStream(self.0.map(|stream| {
-            Lrc::new(
-                stream
-                    .iter()
-                    .enumerate()
-                    .map(|(i, (tree, is_joint))| (f(i, tree.clone()), *is_joint))
-                    .collect())
-        }))
+        TokenStream(Lrc::new(
+            self.0
+                .iter()
+                .enumerate()
+                .map(|(i, (tree, is_joint))| (f(i, tree.clone()), *is_joint))
+                .collect()
+        ))
     }

     pub fn map<F: FnMut(TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
-        TokenStream(self.0.map(|stream| {
-            Lrc::new(
-                stream
-                    .iter()
-                    .map(|(tree, is_joint)| (f(tree.clone()), *is_joint))
-                    .collect())
-        }))
+        TokenStream(Lrc::new(
+            self.0
+                .iter()
+                .map(|(tree, is_joint)| (f(tree.clone()), *is_joint))
+                .collect()
+        ))
     }
 }

@@ -405,44 +372,43 @@ impl TokenStreamBuilder {

         // If `self` is not empty and the last tree within the last stream is a
         // token tree marked with `Joint`...
-        if let Some(TokenStream(Some(ref mut last_stream_lrc))) = self.0.last_mut() {
+        if let Some(TokenStream(ref mut last_stream_lrc)) = self.0.last_mut() {
             if let Some((TokenTree::Token(last_token), Joint)) = last_stream_lrc.last() {

                 // ...and `stream` is not empty and the first tree within it is
                 // a token tree...
-                if let TokenStream(Some(ref mut stream_lrc)) = stream {
-                    if let Some((TokenTree::Token(token), is_joint)) = stream_lrc.first() {
-
-                        // ...and the two tokens can be glued together...
-                        if let Some(glued_tok) = last_token.glue(&token) {
-
-                            // ...then do so, by overwriting the last token
-                            // tree in `self` and removing the first token tree
-                            // from `stream`. This requires using `make_mut()`
-                            // on the last stream in `self` and on `stream`,
-                            // and in practice this doesn't cause cloning 99.9%
-                            // of the time.
-
-                            // Overwrite the last token tree with the merged
-                            // token.
-                            let last_vec_mut = Lrc::make_mut(last_stream_lrc);
-                            *last_vec_mut.last_mut().unwrap() =
-                                (TokenTree::Token(glued_tok), *is_joint);
-
-                            // Remove the first token tree from `stream`. (This
-                            // is almost always the only tree in `stream`.)
-                            let stream_vec_mut = Lrc::make_mut(stream_lrc);
-                            stream_vec_mut.remove(0);
-
-                            // Don't push `stream` if it's empty -- that could
-                            // block subsequent token gluing, by getting
-                            // between two token trees that should be glued
-                            // together.
-                            if !stream.is_empty() {
-                                self.0.push(stream);
-                            }
-                            return;
-                        }
+                let TokenStream(ref mut stream_lrc) = stream;
+                if let Some((TokenTree::Token(token), is_joint)) = stream_lrc.first() {

+                    // ...and the two tokens can be glued together...
+                    if let Some(glued_tok) = last_token.glue(&token) {

+                        // ...then do so, by overwriting the last token
+                        // tree in `self` and removing the first token tree
+                        // from `stream`. This requires using `make_mut()`
+                        // on the last stream in `self` and on `stream`,
+                        // and in practice this doesn't cause cloning 99.9%
+                        // of the time.

+                        // Overwrite the last token tree with the merged
+                        // token.
+                        let last_vec_mut = Lrc::make_mut(last_stream_lrc);
+                        *last_vec_mut.last_mut().unwrap() =
+                            (TokenTree::Token(glued_tok), *is_joint);

+                        // Remove the first token tree from `stream`. (This
+                        // is almost always the only tree in `stream`.)
+                        let stream_vec_mut = Lrc::make_mut(stream_lrc);
+                        stream_vec_mut.remove(0);

+                        // Don't push `stream` if it's empty -- that could
+                        // block subsequent token gluing, by getting
+                        // between two token trees that should be glued
+                        // together.
+                        if !stream.is_empty() {
+                            self.0.push(stream);
+                        }
+                        return;
                     }
                 }
             }
@@ -475,16 +441,11 @@ impl Cursor {
     }

     pub fn next_with_joint(&mut self) -> Option<TreeAndJoint> {
-        match self.stream.0 {
-            None => None,
-            Some(ref stream) => {
-                if self.index < stream.len() {
-                    self.index += 1;
-                    Some(stream[self.index - 1].clone())
-                } else {
-                    None
-                }
-            }
+        if self.index < self.stream.len() {
+            self.index += 1;
+            Some(self.stream.0[self.index - 1].clone())
+        } else {
+            None
         }
     }

@@ -493,16 +454,13 @@ impl Cursor {
             return;
         }
         let index = self.index;
-        let stream = mem::replace(&mut self.stream, TokenStream(None));
+        let stream = mem::take(&mut self.stream);
         *self = TokenStream::from_streams(smallvec![stream, new_stream]).into_trees();
         self.index = index;
     }

     pub fn look_ahead(&self, n: usize) -> Option<TokenTree> {
-        match self.stream.0 {
-            None => None,
-            Some(ref stream) => stream[self.index ..].get(n).map(|(tree, _)| tree.clone()),
-        }
+        self.stream.0[self.index ..].get(n).map(|(tree, _)| tree.clone())
     }
 }
diff --git a/src/libsyntax_ext/plugin_macro_defs.rs b/src/libsyntax_ext/plugin_macro_defs.rs
index 315babceae32c..62c7e188eba27 100644
--- a/src/libsyntax_ext/plugin_macro_defs.rs
+++ b/src/libsyntax_ext/plugin_macro_defs.rs
@@ -20,7 +20,7 @@ fn plugin_macro_def(name: Name, span: Span) -> P<Item> {
         attr::mk_word_item(Ident::new(sym::rustc_builtin_macro, span)));

     let parens: TreeAndJoint = TokenTree::Delimited(
-        DelimSpan::from_single(span), token::Paren, TokenStream::empty()
+        DelimSpan::from_single(span), token::Paren, TokenStream::default()
     ).into();
     let trees = vec![parens.clone(), TokenTree::token(token::FatArrow, span).into(), parens];

diff --git a/src/test/ui/consts/issue-64506.rs b/src/test/ui/consts/issue-64506.rs
new file mode 100644
index 0000000000000..db3e85a7bdfd1
--- /dev/null
+++ b/src/test/ui/consts/issue-64506.rs
@@ -0,0 +1,20 @@
+// check-pass
+
+#[derive(Copy, Clone)]
+pub struct ChildStdin {
+    inner: AnonPipe,
+}
+
+#[derive(Copy, Clone)]
+enum AnonPipe {}
+
+const FOO: () = {
+    union Foo {
+        a: ChildStdin,
+        b: (),
+    }
+    let x = unsafe { Foo { b: () }.a };
+    let x = &x.inner;
+};
+
+fn main() {}
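Editor's note: the sketch below is not part of the patch. It is a minimal, self-contained model of the `TokenStream` representation this diff converges on, for readers outside the compiler tree. `Rc` stands in for rustc's `Lrc` alias and a plain `String` stands in for `TreeAndJoint`; both substitutions are ours, made so the sketch compiles with only the standard library.

```rust
use std::rc::Rc;

// After the patch: no `Option` wrapper, and `Default` derives an empty stream.
#[derive(Clone, Debug, Default)]
struct TokenStream(Rc<Vec<String>>);

impl TokenStream {
    fn new(trees: Vec<String>) -> TokenStream {
        TokenStream(Rc::new(trees))
    }
    fn is_empty(&self) -> bool {
        self.0.is_empty()
    }
}

fn main() {
    // `TokenStream::default()` is the empty stream, which is why the patch
    // can replace every `TokenStream::empty()` call site.
    let empty = TokenStream::default();
    assert!(empty.is_empty());

    // With a single representation of "empty", consumers like
    // `Cursor::look_ahead` above no longer need to distinguish `None`
    // from a `Some` wrapping an empty `Vec`.
    let stream = TokenStream::new(vec!["ident".to_string()]);
    assert!(!stream.is_empty());
}
```

The trade-off, per the doc comment deleted from `tokenstream.rs`, is that an empty stream now pays for one shared allocation; in exchange, every `match self.0 { None => ..., Some(ref stream) => ... }` in the diff collapses to a direct field access.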