From 7678e6ad8560cb559e1ca0b37e362890c25fe92b Mon Sep 17 00:00:00 2001
From: Nika Layzell
Date: Mon, 28 Jun 2021 19:43:40 -0400
Subject: [PATCH 1/7] proc_macro: support encoding/decoding structs with type
 parameters

---
 library/proc_macro/src/bridge/rpc.rs | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/library/proc_macro/src/bridge/rpc.rs b/library/proc_macro/src/bridge/rpc.rs
index f79e016400fb1..e19fcf68c5c0c 100644
--- a/library/proc_macro/src/bridge/rpc.rs
+++ b/library/proc_macro/src/bridge/rpc.rs
@@ -43,15 +43,17 @@ macro_rules! rpc_encode_decode {
             }
         }
     };
-    (struct $name:ident { $($field:ident),* $(,)? }) => {
-        impl<S> Encode<S> for $name {
+    (struct $name:ident $(<$($T:ident),+>)? { $($field:ident),* $(,)? }) => {
+        impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
             fn encode(self, w: &mut Writer, s: &mut S) {
                 $(self.$field.encode(w, s);)*
             }
         }

-        impl<S> DecodeMut<'_, '_, S> for $name {
-            fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+        impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S>
+            for $name $(<$($T),+>)?
+        {
+            fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
                 $name {
                     $($field: DecodeMut::decode(r, s)),*
                 }

From 1793ee06589193a33f7f3d6670928dcb5a0f4742 Mon Sep 17 00:00:00 2001
From: Nika Layzell
Date: Thu, 1 Jul 2021 12:56:07 -0400
Subject: [PATCH 2/7] proc_macro: support encoding/decoding Vec<T>

---
 library/proc_macro/src/bridge/mod.rs | 15 +++++++++++++++
 library/proc_macro/src/bridge/rpc.rs | 20 ++++++++++++++++++++
 2 files changed, 35 insertions(+)

diff --git a/library/proc_macro/src/bridge/mod.rs b/library/proc_macro/src/bridge/mod.rs
index 4d3e89ba09356..aca43945491fa 100644
--- a/library/proc_macro/src/bridge/mod.rs
+++ b/library/proc_macro/src/bridge/mod.rs
@@ -337,6 +337,21 @@ impl<T: Unmark, E: Unmark> Unmark for Result<T, E> {
     }
 }

+impl<T: Mark> Mark for Vec<T> {
+    type Unmarked = Vec<T::Unmarked>;
+    fn mark(unmarked: Self::Unmarked) -> Self {
+        // Should be a no-op due to std's in-place collect optimizations.
+        unmarked.into_iter().map(T::mark).collect()
+    }
+}
+impl<T: Unmark> Unmark for Vec<T> {
+    type Unmarked = Vec<T::Unmarked>;
+    fn unmark(self) -> Self::Unmarked {
+        // Should be a no-op due to std's in-place collect optimizations.
+        self.into_iter().map(T::unmark).collect()
+    }
+}
+
 macro_rules! mark_noop {
     ($($ty:ty),* $(,)?) => {
         $(
diff --git a/library/proc_macro/src/bridge/rpc.rs b/library/proc_macro/src/bridge/rpc.rs
index e19fcf68c5c0c..a94334e07362e 100644
--- a/library/proc_macro/src/bridge/rpc.rs
+++ b/library/proc_macro/src/bridge/rpc.rs
@@ -248,6 +248,26 @@ impl<S> DecodeMut<'_, '_, S> for String {
     }
 }

+impl<S, T: Encode<S>> Encode<S> for Vec<T> {
+    fn encode(self, w: &mut Writer, s: &mut S) {
+        self.len().encode(w, s);
+        for x in self {
+            x.encode(w, s);
+        }
+    }
+}
+
+impl<'a, S, T: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S> for Vec<T> {
+    fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+        let len = usize::decode(r, s);
+        let mut vec = Vec::with_capacity(len);
+        for _ in 0..len {
+            vec.push(T::decode(r, s));
+        }
+        vec
+    }
+}
+
 /// Simplified version of panic payloads, ignoring
 /// types other than `&'static str` and `String`.
 pub enum PanicMessage {

From 2b172194686f0769e70d95cf6d8f571bc29a023e Mon Sep 17 00:00:00 2001
From: Nika Layzell
Date: Tue, 29 Jun 2021 16:36:10 -0400
Subject: [PATCH 3/7] proc_macro: use macros to simplify aggregate Mark/Unmark
 definitions

---
 library/proc_macro/src/bridge/mod.rs | 58 ++++++++++++++++------------
 1 file changed, 34 insertions(+), 24 deletions(-)

diff --git a/library/proc_macro/src/bridge/mod.rs b/library/proc_macro/src/bridge/mod.rs
index aca43945491fa..22b4b047396bf 100644
--- a/library/proc_macro/src/bridge/mod.rs
+++ b/library/proc_macro/src/bridge/mod.rs
@@ -409,6 +409,39 @@ rpc_encode_decode!(
     }
 );

+macro_rules! mark_compound {
+    (enum $name:ident <$($T:ident),+> { $($variant:ident $(($field:ident))?),* $(,)? }) => {
+        impl<$($T: Mark),+> Mark for $name <$($T),+> {
+            type Unmarked = $name <$($T::Unmarked),+>;
+            fn mark(unmarked: Self::Unmarked) -> Self {
+                match unmarked {
+                    $($name::$variant $(($field))? => {
+                        $name::$variant $((Mark::mark($field)))?
+                    })*
+                }
+            }
+        }
+
+        impl<$($T: Unmark),+> Unmark for $name <$($T),+> {
+            type Unmarked = $name <$($T::Unmarked),+>;
+            fn unmark(self) -> Self::Unmarked {
+                match self {
+                    $($name::$variant $(($field))? => {
+                        $name::$variant $((Unmark::unmark($field)))?
+                    })*
+                }
+            }
+        }
+    }
+}
+
+macro_rules! compound_traits {
+    ($($t:tt)*) => {
+        rpc_encode_decode!($($t)*);
+        mark_compound!($($t)*);
+    };
+}
+
 #[derive(Clone)]
 pub enum TokenTree<G, P, I, L> {
     Group(G),
     Punct(P),
     Ident(I),
     Literal(L),
 }

-impl<G: Mark, P: Mark, I: Mark, L: Mark> Mark for TokenTree<G, P, I, L> {
-    type Unmarked = TokenTree<G::Unmarked, P::Unmarked, I::Unmarked, L::Unmarked>;
-    fn mark(unmarked: Self::Unmarked) -> Self {
-        match unmarked {
-            TokenTree::Group(tt) => TokenTree::Group(G::mark(tt)),
-            TokenTree::Punct(tt) => TokenTree::Punct(P::mark(tt)),
-            TokenTree::Ident(tt) => TokenTree::Ident(I::mark(tt)),
-            TokenTree::Literal(tt) => TokenTree::Literal(L::mark(tt)),
-        }
-    }
-}
-impl<G: Unmark, P: Unmark, I: Unmark, L: Unmark> Unmark for TokenTree<G, P, I, L> {
-    type Unmarked = TokenTree<G::Unmarked, P::Unmarked, I::Unmarked, L::Unmarked>;
-    fn unmark(self) -> Self::Unmarked {
-        match self {
-            TokenTree::Group(tt) => TokenTree::Group(tt.unmark()),
-            TokenTree::Punct(tt) => TokenTree::Punct(tt.unmark()),
-            TokenTree::Ident(tt) => TokenTree::Ident(tt.unmark()),
-            TokenTree::Literal(tt) => TokenTree::Literal(tt.unmark()),
-        }
-    }
-}
-
-rpc_encode_decode!(
+compound_traits!(
     enum TokenTree<G, P, I, L> {
         Group(tt),
         Punct(tt),

From 0a049fd30d564d1cbc2d60398de848612a6c8125 Mon Sep 17 00:00:00 2001
From: Nika Layzell
Date: Thu, 1 Jul 2021 15:03:51 -0400
Subject: [PATCH 4/7] proc_macro: reduce the number of messages required to
 create, extend, and iterate TokenStreams

This significantly reduces the cost of common interactions with TokenStream
when running with the CrossThread execution strategy, by reducing the number
of RPC calls required.
---
 .../rustc_expand/src/proc_macro_server.rs | 98 ++++++++++---------
 library/proc_macro/src/bridge/client.rs | 18 ++--
 library/proc_macro/src/bridge/mod.rs | 28 +++---
 library/proc_macro/src/bridge/server.rs | 2 -
 library/proc_macro/src/lib.rs | 78 ++++++++-----
 5 files changed, 128 insertions(+), 96 deletions(-)

diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs
index d4407c03d03f5..8b6d5bcd935e1 100644
--- a/compiler/rustc_expand/src/proc_macro_server.rs
+++ b/compiler/rustc_expand/src/proc_macro_server.rs
@@ -277,12 +277,6 @@ impl ToInternal<rustc_errors::Level> for Level {

 pub struct FreeFunctions;

-#[derive(Clone)]
-pub struct TokenStreamIter {
-    cursor: tokenstream::Cursor,
-    stack: Vec<TokenTree<Group, Punct, Ident, Literal>>,
-}
-
 #[derive(Clone)]
 pub struct Group {
     delimiter: Delimiter,
@@ -382,8 +376,6 @@ impl<'a, 'b> Rustc<'a, 'b> {
 impl server::Types for Rustc<'_, '_> {
     type FreeFunctions = FreeFunctions;
     type TokenStream = TokenStream;
-    type TokenStreamBuilder = tokenstream::TokenStreamBuilder;
-    type TokenStreamIter = TokenStreamIter;
     type Group = Group;
     type Punct = Punct;
     type Ident = Ident;
@@ -408,9 +400,6 @@ impl server::FreeFunctions for Rustc<'_, '_> {
 }

 impl server::TokenStream for Rustc<'_, '_> {
-    fn new(&mut self) -> Self::TokenStream {
-        TokenStream::default()
-    }
     fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
         stream.is_empty()
     }
@@ -481,53 +470,74 @@ impl server::TokenStream for Rustc<'_, '_> {
     ) -> Self::TokenStream {
         tree.to_internal()
     }
-    fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter {
-        TokenStreamIter { cursor: stream.into_trees(), stack: vec![] }
-    }
-}
-
-impl server::TokenStreamBuilder for Rustc<'_, '_> {
-    fn new(&mut self) -> Self::TokenStreamBuilder {
-        tokenstream::TokenStreamBuilder::new()
-    }
-    fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: Self::TokenStream) {
-        builder.push(stream);
+    fn concat_trees(
+        &mut self,
+        base: Option<Self::TokenStream>,
+        trees: Vec<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>>,
+    ) -> Self::TokenStream {
+        let mut builder = tokenstream::TokenStreamBuilder::new();
+        if let Some(base) = base {
+            builder.push(base);
+        }
+        for tree in trees {
+            builder.push(tree.to_internal());
+        }
+        builder.build()
     }
-    fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream {
+    fn concat_streams(
+        &mut self,
+        base: Option<Self::TokenStream>,
+        streams: Vec<Self::TokenStream>,
+    ) -> Self::TokenStream {
+        let mut builder = tokenstream::TokenStreamBuilder::new();
+        if let Some(base) = base {
+            builder.push(base);
+        }
+        for stream in streams {
+            builder.push(stream);
+        }
         builder.build()
     }
-}
-
-impl server::TokenStreamIter for Rustc<'_, '_> {
-    fn next(
+    fn into_iter(
         &mut self,
-        iter: &mut Self::TokenStreamIter,
-    ) -> Option<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
+        stream: Self::TokenStream,
+    ) -> Vec<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
+        // XXX: This is a raw port of the previous approach, and can probably be
+        // optimized.
+        let mut cursor = stream.into_trees();
+        let mut stack = Vec::new();
+        let mut tts = Vec::new();
         loop {
-            let tree = iter.stack.pop().or_else(|| {
-                let next = iter.cursor.next_with_spacing()?;
-                Some(TokenTree::from_internal((next, &mut iter.stack, self)))
-            })?;
-            // A hack used to pass AST fragments to attribute and derive macros
-            // as a single nonterminal token instead of a token stream.
-            // Such token needs to be "unwrapped" and not represented as a delimited group.
-            // FIXME: It needs to be removed, but there are some compatibility issues (see #73345).
-            if let TokenTree::Group(ref group) = tree {
-                if group.flatten {
-                    iter.cursor.append(group.stream.clone());
-                    continue;
+            let next = stack.pop().or_else(|| {
+                let next = cursor.next_with_spacing()?;
+                Some(TokenTree::from_internal((next, &mut stack, self)))
+            });
+            match next {
+                Some(TokenTree::Group(group)) => {
+                    // A hack used to pass AST fragments to attribute and derive
+                    // macros as a single nonterminal token instead of a token
+                    // stream. Such token needs to be "unwrapped" and not
+                    // represented as a delimited group.
+                    // FIXME: It needs to be removed, but there are some
+                    // compatibility issues (see #73345).
+                    if group.flatten {
+                        cursor.append(group.stream);
+                        continue;
+                    }
+                    tts.push(TokenTree::Group(group));
                 }
+                Some(tt) => tts.push(tt),
+                None => return tts,
             }
-            return Some(tree);
         }
     }
 }

 impl server::Group for Rustc<'_, '_> {
-    fn new(&mut self, delimiter: Delimiter, stream: Self::TokenStream) -> Self::Group {
+    fn new(&mut self, delimiter: Delimiter, stream: Option<Self::TokenStream>) -> Self::Group {
         Group {
             delimiter,
-            stream,
+            stream: stream.unwrap_or_default(),
             span: DelimSpan::from_single(server::Span::call_site(self)),
             flatten: false,
         }
diff --git a/library/proc_macro/src/bridge/client.rs b/library/proc_macro/src/bridge/client.rs
index c38457ac6712d..421dd30273fd8 100644
--- a/library/proc_macro/src/bridge/client.rs
+++ b/library/proc_macro/src/bridge/client.rs
@@ -178,8 +178,6 @@ define_handles! {
     'owned:
     FreeFunctions,
     TokenStream,
-    TokenStreamBuilder,
-    TokenStreamIter,
     Group,
     Literal,
     SourceFile,
@@ -204,12 +202,6 @@ impl Clone for TokenStream {
     }
 }

-impl Clone for TokenStreamIter {
-    fn clone(&self) -> Self {
-        self.clone()
-    }
-}
-
 impl Clone for Group {
     fn clone(&self) -> Self {
         self.clone()
@@ -435,7 +427,11 @@ impl Client<crate::TokenStream, crate::TokenStream> {
         Client {
             get_handle_counters: HandleCounters::get,
             run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| {
-                run_client(bridge, |input| f(crate::TokenStream(input)).0)
+                run_client(bridge, |input| {
+                    f(crate::TokenStream(Some(input)))
+                        .0
+                        .unwrap_or_else(|| TokenStream::concat_streams(None, vec![]))
+                })
             }),
             _marker: PhantomData,
         }
@@ -450,7 +446,9 @@ impl Client<(crate::TokenStream, crate::TokenStream), crate::TokenStream> {
             get_handle_counters: HandleCounters::get,
             run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| {
                 run_client(bridge, |(input, input2)| {
-                    f(crate::TokenStream(input), crate::TokenStream(input2)).0
+                    f(crate::TokenStream(Some(input)), crate::TokenStream(Some(input2)))
+                        .0
+                        .unwrap_or_else(|| TokenStream::concat_streams(None, vec![]))
                 })
             }),
             _marker: PhantomData,
diff --git a/library/proc_macro/src/bridge/mod.rs b/library/proc_macro/src/bridge/mod.rs
index 22b4b047396bf..c6d0635df576d 100644
--- a/library/proc_macro/src/bridge/mod.rs
+++ b/library/proc_macro/src/bridge/mod.rs
@@ -60,7 +60,6 @@ macro_rules! with_api {
         TokenStream {
             fn drop($self: $S::TokenStream);
             fn clone($self: &$S::TokenStream) -> $S::TokenStream;
-            fn new() -> $S::TokenStream;
             fn is_empty($self: &$S::TokenStream) -> bool;
             fn expand_expr($self: &$S::TokenStream) -> Result<$S::TokenStream, ()>;
             fn from_str(src: &str) -> $S::TokenStream;
@@ -68,25 +67,22 @@ macro_rules! with_api {
             fn from_token_tree(
                 tree: TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>,
             ) -> $S::TokenStream;
-            fn into_iter($self: $S::TokenStream) -> $S::TokenStreamIter;
-        },
-        TokenStreamBuilder {
-            fn drop($self: $S::TokenStreamBuilder);
-            fn new() -> $S::TokenStreamBuilder;
-            fn push($self: &mut $S::TokenStreamBuilder, stream: $S::TokenStream);
-            fn build($self: $S::TokenStreamBuilder) -> $S::TokenStream;
-        },
-        TokenStreamIter {
-            fn drop($self: $S::TokenStreamIter);
-            fn clone($self: &$S::TokenStreamIter) -> $S::TokenStreamIter;
-            fn next(
-                $self: &mut $S::TokenStreamIter,
-            ) -> Option<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>;
+            fn concat_trees(
+                base: Option<$S::TokenStream>,
+                trees: Vec<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>,
+            ) -> $S::TokenStream;
+            fn concat_streams(
+                base: Option<$S::TokenStream>,
+                trees: Vec<$S::TokenStream>,
+            ) -> $S::TokenStream;
+            fn into_iter(
+                $self: $S::TokenStream
+            ) -> Vec<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>;
         },
         Group {
             fn drop($self: $S::Group);
             fn clone($self: &$S::Group) -> $S::Group;
-            fn new(delimiter: Delimiter, stream: $S::TokenStream) -> $S::Group;
+            fn new(delimiter: Delimiter, stream: Option<$S::TokenStream>) -> $S::Group;
             fn delimiter($self: &$S::Group) -> Delimiter;
             fn stream($self: &$S::Group) -> $S::TokenStream;
             fn span($self: &$S::Group) -> $S::Span;
diff --git a/library/proc_macro/src/bridge/server.rs b/library/proc_macro/src/bridge/server.rs
index cbddf39da44d2..d98def36a3ca7 100644
--- a/library/proc_macro/src/bridge/server.rs
+++ b/library/proc_macro/src/bridge/server.rs
@@ -8,8 +8,6 @@ use super::client::HandleStore;
 pub trait Types {
     type FreeFunctions: 'static;
     type TokenStream: 'static + Clone;
-    type TokenStreamBuilder: 'static;
-    type TokenStreamIter: 'static + Clone;
     type Group: 'static + Clone;
     type Punct: 'static + Copy + Eq + Hash;
     type Ident: 'static + Copy + Eq + Hash;
diff --git a/library/proc_macro/src/lib.rs b/library/proc_macro/src/lib.rs
index 30ad3d2388082..c21f365391c3b 100644
--- a/library/proc_macro/src/lib.rs
+++ b/library/proc_macro/src/lib.rs
@@ -43,7 +43,7 @@ use std::cmp::Ordering;
 use std::ops::RangeBounds;
 use std::path::PathBuf;
 use std::str::FromStr;
-use std::{error, fmt, iter, mem};
+use std::{error, fmt, iter};

 /// Determines whether proc_macro has been made accessible to the currently
 /// running program.
@@ -72,7 +72,7 @@ pub fn is_available() -> bool {
 /// and `#[proc_macro_derive]` definitions.
 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
 #[derive(Clone)]
-pub struct TokenStream(bridge::client::TokenStream);
+pub struct TokenStream(Option<bridge::client::TokenStream>);

 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
 impl !Send for TokenStream {}
@@ -126,13 +126,13 @@ impl TokenStream {
     /// Returns an empty `TokenStream` containing no token trees.
     #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
     pub fn new() -> TokenStream {
-        TokenStream(bridge::client::TokenStream::new())
+        TokenStream(None)
     }

     /// Checks if this `TokenStream` is empty.
     #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
     pub fn is_empty(&self) -> bool {
-        self.0.is_empty()
+        self.0.as_ref().map(|h| h.is_empty()).unwrap_or(true)
     }

     /// Parses this `TokenStream` as an expression and attempts to expand any
@@ -147,8 +147,9 @@ impl TokenStream {
     /// considered errors, is unspecified and may change in the future.
#[unstable(feature = "proc_macro_expand", issue = "90765")] pub fn expand_expr(&self) -> Result { - match bridge::client::TokenStream::expand_expr(&self.0) { - Ok(stream) => Ok(TokenStream(stream)), + let stream = self.0.as_ref().ok_or(ExpandError)?; + match bridge::client::TokenStream::expand_expr(stream) { + Ok(stream) => Ok(TokenStream(Some(stream))), Err(_) => Err(ExpandError), } } @@ -166,7 +167,7 @@ impl FromStr for TokenStream { type Err = LexError; fn from_str(src: &str) -> Result { - Ok(TokenStream(bridge::client::TokenStream::from_str(src))) + Ok(TokenStream(Some(bridge::client::TokenStream::from_str(src)))) } } @@ -175,7 +176,7 @@ impl FromStr for TokenStream { #[stable(feature = "proc_macro_lib", since = "1.15.0")] impl ToString for TokenStream { fn to_string(&self) -> String { - self.0.to_string() + self.0.as_ref().map(|t| t.to_string()).unwrap_or_default() } } @@ -208,16 +209,27 @@ impl Default for TokenStream { #[unstable(feature = "proc_macro_quote", issue = "54722")] pub use quote::{quote, quote_span}; +fn tree_to_bridge_tree( + tree: TokenTree, +) -> bridge::TokenTree< + bridge::client::Group, + bridge::client::Punct, + bridge::client::Ident, + bridge::client::Literal, +> { + match tree { + TokenTree::Group(tt) => bridge::TokenTree::Group(tt.0), + TokenTree::Punct(tt) => bridge::TokenTree::Punct(tt.0), + TokenTree::Ident(tt) => bridge::TokenTree::Ident(tt.0), + TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0), + } +} + /// Creates a token stream containing a single token tree. #[stable(feature = "proc_macro_lib2", since = "1.29.0")] impl From for TokenStream { fn from(tree: TokenTree) -> TokenStream { - TokenStream(bridge::client::TokenStream::from_token_tree(match tree { - TokenTree::Group(tt) => bridge::TokenTree::Group(tt.0), - TokenTree::Punct(tt) => bridge::TokenTree::Punct(tt.0), - TokenTree::Ident(tt) => bridge::TokenTree::Ident(tt.0), - TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0), - })) + TokenStream(Some(bridge::client::TokenStream::from_token_tree(tree_to_bridge_tree(tree)))) } } @@ -225,7 +237,10 @@ impl From for TokenStream { #[stable(feature = "proc_macro_lib2", since = "1.29.0")] impl iter::FromIterator for TokenStream { fn from_iter>(trees: I) -> Self { - trees.into_iter().map(TokenStream::from).collect() + TokenStream(Some(bridge::client::TokenStream::concat_trees( + None, + trees.into_iter().map(tree_to_bridge_tree).collect(), + ))) } } @@ -234,24 +249,30 @@ impl iter::FromIterator for TokenStream { #[stable(feature = "proc_macro_lib", since = "1.15.0")] impl iter::FromIterator for TokenStream { fn from_iter>(streams: I) -> Self { - let mut builder = bridge::client::TokenStreamBuilder::new(); - streams.into_iter().for_each(|stream| builder.push(stream.0)); - TokenStream(builder.build()) + TokenStream(Some(bridge::client::TokenStream::concat_streams( + None, + streams.into_iter().filter_map(|stream| stream.0).collect(), + ))) } } #[stable(feature = "token_stream_extend", since = "1.30.0")] impl Extend for TokenStream { fn extend>(&mut self, trees: I) { - self.extend(trees.into_iter().map(TokenStream::from)); + *self = TokenStream(Some(bridge::client::TokenStream::concat_trees( + self.0.take(), + trees.into_iter().map(|tree| tree_to_bridge_tree(tree)).collect(), + ))); } } #[stable(feature = "token_stream_extend", since = "1.30.0")] impl Extend for TokenStream { fn extend>(&mut self, streams: I) { - // FIXME(eddyb) Use an optimized implementation if/when possible. 
-        *self = iter::once(mem::replace(self, Self::new())).chain(streams).collect();
+        *self = TokenStream(Some(bridge::client::TokenStream::concat_streams(
+            self.0.take(),
+            streams.into_iter().filter_map(|stream| stream.0).collect(),
+        )));
     }
 }

@@ -265,7 +286,16 @@ pub mod token_stream {
     /// and returns whole groups as token trees.
     #[derive(Clone)]
     #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
-    pub struct IntoIter(bridge::client::TokenStreamIter);
+    pub struct IntoIter(
+        std::vec::IntoIter<
+            bridge::TokenTree<
+                bridge::client::Group,
+                bridge::client::Punct,
+                bridge::client::Ident,
+                bridge::client::Literal,
+            >,
+        >,
+    );

     #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
     impl Iterator for IntoIter {
@@ -287,7 +317,7 @@ pub mod token_stream {
         type IntoIter = IntoIter;

         fn into_iter(self) -> IntoIter {
-            IntoIter(self.0.into_iter())
+            IntoIter(self.0.map(|v| v.into_iter()).unwrap_or_default().into_iter())
         }
     }
 }
@@ -734,7 +764,7 @@ impl Group {
     /// returned above.
     #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
     pub fn stream(&self) -> TokenStream {
-        TokenStream(self.0.stream())
+        TokenStream(Some(self.0.stream()))
     }

     /// Returns the span for the delimiters of this token stream, spanning the

From 4d45af9e734ed0e2350290b4705d7931f70349d4 Mon Sep 17 00:00:00 2001
From: Nika Layzell
Date: Sun, 15 May 2022 13:46:33 -0400
Subject: [PATCH 5/7] Try to reduce codegen complexity of TokenStream's
 FromIterator and Extend impls

This is an experimental patch to try to reduce the codegen complexity of
TokenStream's FromIterator and Extend implementations for downstream crates,
by moving the core logic into a helper type. This might help improve build
performance of crates which depend on proc_macro as iterators are used less,
and the compiler may take less time to do things like attempt specializations
or other iterator optimizations.

The change intentionally sacrifices some optimization opportunities, such as
using the specializations for collecting iterators derived from
Vec::into_iter() into Vec.

This is one of the simpler potential approaches to reducing the amount of
code generated in crates depending on proc_macro, so it seems worth trying
before other more-involved changes.

---
 .../rustc_expand/src/proc_macro_server.rs | 4 +-
 library/proc_macro/src/lib.rs | 108 +++++++++++++++---
 2 files changed, 94 insertions(+), 18 deletions(-)

diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs
index 8b6d5bcd935e1..cc66eefac3e89 100644
--- a/compiler/rustc_expand/src/proc_macro_server.rs
+++ b/compiler/rustc_expand/src/proc_macro_server.rs
@@ -502,8 +502,8 @@ impl server::TokenStream for Rustc<'_, '_> {
         &mut self,
         stream: Self::TokenStream,
     ) -> Vec<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
-        // XXX: This is a raw port of the previous approach, and can probably be
-        // optimized.
+        // FIXME: This is a raw port of the previous approach, and can probably
+        // be optimized.
         let mut cursor = stream.into_trees();
         let mut stack = Vec::new();
         let mut tts = Vec::new();
diff --git a/library/proc_macro/src/lib.rs b/library/proc_macro/src/lib.rs
index c21f365391c3b..6e645216c8dd2 100644
--- a/library/proc_macro/src/lib.rs
+++ b/library/proc_macro/src/lib.rs
@@ -233,14 +233,90 @@ impl From<TokenTree> for TokenStream {
     }
 }

+/// Non-generic helper for implementing `FromIterator<TokenTree>` and
+/// `Extend<TokenTree>` with less monomorphization in calling crates.
+struct ExtendStreamWithTreesHelper {
+    trees: Vec<
+        bridge::TokenTree<
+            bridge::client::Group,
+            bridge::client::Punct,
+            bridge::client::Ident,
+            bridge::client::Literal,
+        >,
+    >,
+}
+
+impl ExtendStreamWithTreesHelper {
+    fn new(capacity: usize) -> Self {
+        ExtendStreamWithTreesHelper { trees: Vec::with_capacity(capacity) }
+    }
+
+    fn push(&mut self, tree: TokenTree) {
+        self.trees.push(tree_to_bridge_tree(tree));
+    }
+
+    fn build(self) -> TokenStream {
+        if self.trees.is_empty() {
+            TokenStream(None)
+        } else {
+            TokenStream(Some(bridge::client::TokenStream::concat_trees(None, self.trees)))
+        }
+    }
+
+    fn extend(self, stream: &mut TokenStream) {
+        if self.trees.is_empty() {
+            return;
+        }
+        stream.0 = Some(bridge::client::TokenStream::concat_trees(stream.0.take(), self.trees))
+    }
+}
+
+/// Non-generic helper for implementing `FromIterator<TokenStream>` and
+/// `Extend<TokenStream>` with less monomorphization in calling crates.
+struct ExtendStreamWithStreamsHelper {
+    streams: Vec<bridge::client::TokenStream>,
+}
+
+impl ExtendStreamWithStreamsHelper {
+    fn new(capacity: usize) -> Self {
+        ExtendStreamWithStreamsHelper { streams: Vec::with_capacity(capacity) }
+    }
+
+    fn push(&mut self, stream: TokenStream) {
+        if let Some(stream) = stream.0 {
+            self.streams.push(stream);
+        }
+    }
+
+    fn build(mut self) -> TokenStream {
+        if self.streams.len() <= 1 {
+            TokenStream(self.streams.pop())
+        } else {
+            TokenStream(Some(bridge::client::TokenStream::concat_streams(None, self.streams)))
+        }
+    }
+
+    fn extend(mut self, stream: &mut TokenStream) {
+        if self.streams.is_empty() {
+            return;
+        }
+        let base = stream.0.take();
+        if base.is_none() && self.streams.len() == 1 {
+            stream.0 = self.streams.pop();
+        } else {
+            stream.0 = Some(bridge::client::TokenStream::concat_streams(base, self.streams));
+        }
+    }
+}
+
 /// Collects a number of token trees into a single stream.
#[stable(feature = "proc_macro_lib2", since = "1.29.0")] impl iter::FromIterator for TokenStream { fn from_iter>(trees: I) -> Self { - TokenStream(Some(bridge::client::TokenStream::concat_trees( - None, - trees.into_iter().map(tree_to_bridge_tree).collect(), - ))) + let iter = trees.into_iter(); + let mut builder = ExtendStreamWithTreesHelper::new(iter.size_hint().0); + iter.for_each(|tree| builder.push(tree)); + builder.build() } } @@ -249,30 +325,30 @@ impl iter::FromIterator for TokenStream { #[stable(feature = "proc_macro_lib", since = "1.15.0")] impl iter::FromIterator for TokenStream { fn from_iter>(streams: I) -> Self { - TokenStream(Some(bridge::client::TokenStream::concat_streams( - None, - streams.into_iter().filter_map(|stream| stream.0).collect(), - ))) + let iter = streams.into_iter(); + let mut builder = ExtendStreamWithStreamsHelper::new(iter.size_hint().0); + iter.for_each(|stream| builder.push(stream)); + builder.build() } } #[stable(feature = "token_stream_extend", since = "1.30.0")] impl Extend for TokenStream { fn extend>(&mut self, trees: I) { - *self = TokenStream(Some(bridge::client::TokenStream::concat_trees( - self.0.take(), - trees.into_iter().map(|tree| tree_to_bridge_tree(tree)).collect(), - ))); + let iter = trees.into_iter(); + let mut builder = ExtendStreamWithTreesHelper::new(iter.size_hint().0); + iter.for_each(|tree| builder.push(tree)); + builder.extend(self); } } #[stable(feature = "token_stream_extend", since = "1.30.0")] impl Extend for TokenStream { fn extend>(&mut self, streams: I) { - *self = TokenStream(Some(bridge::client::TokenStream::concat_streams( - self.0.take(), - streams.into_iter().filter_map(|stream| stream.0).collect(), - ))); + let iter = streams.into_iter(); + let mut builder = ExtendStreamWithStreamsHelper::new(iter.size_hint().0); + iter.for_each(|stream| builder.push(stream)); + builder.extend(self); } } From af514240089891ddfaa3b9368dd255513141e7a9 Mon Sep 17 00:00:00 2001 From: Nika Layzell Date: Fri, 17 Jun 2022 11:07:42 -0400 Subject: [PATCH 6/7] Move empty final TokenStream handling to server side of bridge --- library/proc_macro/src/bridge/client.rs | 10 ++-------- library/proc_macro/src/bridge/server.rs | 20 ++++++++++++++------ 2 files changed, 16 insertions(+), 14 deletions(-) diff --git a/library/proc_macro/src/bridge/client.rs b/library/proc_macro/src/bridge/client.rs index 421dd30273fd8..068f3e241beac 100644 --- a/library/proc_macro/src/bridge/client.rs +++ b/library/proc_macro/src/bridge/client.rs @@ -427,11 +427,7 @@ impl Client { Client { get_handle_counters: HandleCounters::get, run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| { - run_client(bridge, |input| { - f(crate::TokenStream(Some(input))) - .0 - .unwrap_or_else(|| TokenStream::concat_streams(None, vec![])) - }) + run_client(bridge, |input| f(crate::TokenStream(Some(input))).0) }), _marker: PhantomData, } @@ -446,9 +442,7 @@ impl Client<(crate::TokenStream, crate::TokenStream), crate::TokenStream> { get_handle_counters: HandleCounters::get, run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| { run_client(bridge, |(input, input2)| { - f(crate::TokenStream(Some(input)), crate::TokenStream(Some(input2))) - .0 - .unwrap_or_else(|| TokenStream::concat_streams(None, vec![])) + f(crate::TokenStream(Some(input)), crate::TokenStream(Some(input2))).0 }) }), _marker: PhantomData, diff --git a/library/proc_macro/src/bridge/server.rs b/library/proc_macro/src/bridge/server.rs index d98def36a3ca7..3672299f18f48 100644 --- 
+++ b/library/proc_macro/src/bridge/server.rs
@@ -273,13 +273,17 @@ fn run_server<
 }

 impl client::Client<crate::TokenStream, crate::TokenStream> {
-    pub fn run<S: Server>(
+    pub fn run<S>(
         &self,
         strategy: &impl ExecutionStrategy,
         server: S,
         input: S::TokenStream,
         force_show_panics: bool,
-    ) -> Result<S::TokenStream, PanicMessage> {
+    ) -> Result<S::TokenStream, PanicMessage>
+    where
+        S: Server,
+        S::TokenStream: Default,
+    {
         let client::Client { get_handle_counters, run, _marker } = *self;
         run_server(
             strategy,
@@ -289,19 +293,23 @@ impl client::Client<crate::TokenStream, crate::TokenStream> {
             run,
             force_show_panics,
         )
-        .map(<MarkedTypes<S> as Types>::TokenStream::unmark)
+        .map(|s| <Option<<MarkedTypes<S> as Types>::TokenStream>>::unmark(s).unwrap_or_default())
     }
 }

 impl client::Client<(crate::TokenStream, crate::TokenStream), crate::TokenStream> {
-    pub fn run<S: Server>(
+    pub fn run<S>(
         &self,
         strategy: &impl ExecutionStrategy,
         server: S,
         input: S::TokenStream,
         input2: S::TokenStream,
         force_show_panics: bool,
-    ) -> Result<S::TokenStream, PanicMessage> {
+    ) -> Result<S::TokenStream, PanicMessage>
+    where
+        S: Server,
+        S::TokenStream: Default,
+    {
         let client::Client { get_handle_counters, run, _marker } = *self;
         run_server(
             strategy,
@@ -314,6 +322,6 @@ impl client::Client<(crate::TokenStream, crate::TokenStream), crate::TokenStream
             run,
             force_show_panics,
         )
-        .map(<MarkedTypes<S> as Types>::TokenStream::unmark)
+        .map(|s| <Option<<MarkedTypes<S> as Types>::TokenStream>>::unmark(s).unwrap_or_default())
     }
 }

From df925fda9c9eee8010564dde7daa44bc5286446e Mon Sep 17 00:00:00 2001
From: Nika Layzell
Date: Fri, 17 Jun 2022 22:10:07 -0400
Subject: [PATCH 7/7] review fixups

---
 .../rustc_expand/src/proc_macro_server.rs | 7 +--
 library/proc_macro/src/bridge/mod.rs | 53 +++++++++----------
 library/proc_macro/src/bridge/rpc.rs | 23 --------
 library/proc_macro/src/lib.rs | 30 +++++------
 4 files changed, 43 insertions(+), 70 deletions(-)

diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs
index cc66eefac3e89..af0b5639d6136 100644
--- a/compiler/rustc_expand/src/proc_macro_server.rs
+++ b/compiler/rustc_expand/src/proc_macro_server.rs
@@ -498,12 +498,13 @@ impl server::TokenStream for Rustc<'_, '_> {
         }
         builder.build()
     }
-    fn into_iter(
+    fn into_trees(
        &mut self,
         stream: Self::TokenStream,
     ) -> Vec<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
-        // FIXME: This is a raw port of the previous approach, and can probably
-        // be optimized.
+        // FIXME: This is a raw port of the previous approach (which had a
+        // `TokenStreamIter` server-side object with a single `next` method),
+        // and can probably be optimized (for bulk conversion).
         let mut cursor = stream.into_trees();
         let mut stack = Vec::new();
         let mut tts = Vec::new();
diff --git a/library/proc_macro/src/bridge/mod.rs b/library/proc_macro/src/bridge/mod.rs
index c6d0635df576d..4e931569ef633 100644
--- a/library/proc_macro/src/bridge/mod.rs
+++ b/library/proc_macro/src/bridge/mod.rs
@@ -73,9 +73,9 @@ macro_rules! with_api {
             ) -> $S::TokenStream;
             fn concat_streams(
                 base: Option<$S::TokenStream>,
-                trees: Vec<$S::TokenStream>,
+                streams: Vec<$S::TokenStream>,
             ) -> $S::TokenStream;
-            fn into_iter(
+            fn into_trees(
                 $self: $S::TokenStream
             ) -> Vec<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>;
         },
@@ -307,32 +307,6 @@ impl<'a, T, M> Unmark for &'a mut Marked<T, M> {
     }
 }

-impl<T: Mark> Mark for Option<T> {
-    type Unmarked = Option<T::Unmarked>;
-    fn mark(unmarked: Self::Unmarked) -> Self {
-        unmarked.map(T::mark)
-    }
-}
-impl<T: Unmark> Unmark for Option<T> {
-    type Unmarked = Option<T::Unmarked>;
-    fn unmark(self) -> Self::Unmarked {
-        self.map(T::unmark)
-    }
-}
-
-impl<T: Mark, E: Mark> Mark for Result<T, E> {
-    type Unmarked = Result<T::Unmarked, E::Unmarked>;
-    fn mark(unmarked: Self::Unmarked) -> Self {
-        unmarked.map(T::mark).map_err(E::mark)
-    }
-}
-impl<T: Unmark, E: Unmark> Unmark for Result<T, E> {
-    type Unmarked = Result<T::Unmarked, E::Unmarked>;
-    fn unmark(self) -> Self::Unmarked {
-        self.map(T::unmark).map_err(E::unmark)
-    }
-}
-
 impl<T: Mark> Mark for Vec<T> {
     type Unmarked = Vec<T::Unmarked>;
     fn mark(unmarked: Self::Unmarked) -> Self {
@@ -378,7 +352,6 @@ mark_noop! {
     Level,
     LineColumn,
     Spacing,
-    Bound<usize>,
 }

 rpc_encode_decode!(
@@ -438,6 +411,28 @@ macro_rules! compound_traits {
     };
 }

+compound_traits!(
+    enum Bound<T> {
+        Included(x),
+        Excluded(x),
+        Unbounded,
+    }
+);
+
+compound_traits!(
+    enum Option<T> {
+        Some(t),
+        None,
+    }
+);
+
+compound_traits!(
+    enum Result<T, E> {
+        Ok(t),
+        Err(e),
+    }
+);
+
 #[derive(Clone)]
 pub enum TokenTree<G, P, I, L> {
     Group(G),
diff --git a/library/proc_macro/src/bridge/rpc.rs b/library/proc_macro/src/bridge/rpc.rs
index a94334e07362e..e9d7a46c06f6d 100644
--- a/library/proc_macro/src/bridge/rpc.rs
+++ b/library/proc_macro/src/bridge/rpc.rs
@@ -4,7 +4,6 @@ use std::any::Any;
 use std::char;
 use std::io::Write;
 use std::num::NonZeroU32;
-use std::ops::Bound;
 use std::str;

 pub(super) type Writer = super::buffer::Buffer;
@@ -186,28 +185,6 @@ impl<'a, S, A: for<'s> DecodeMut<'a, 's, S>, B: for<'s> DecodeMut<'a, 's, S>> De
     }
 }

-rpc_encode_decode!(
-    enum Bound<T> {
-        Included(x),
-        Excluded(x),
-        Unbounded,
-    }
-);
-
-rpc_encode_decode!(
-    enum Option<T> {
-        None,
-        Some(x),
-    }
-);
-
-rpc_encode_decode!(
-    enum Result<T, E> {
-        Ok(x),
-        Err(e),
-    }
-);
-
 impl<S> Encode<S> for &[u8] {
     fn encode(self, w: &mut Writer, s: &mut S) {
         self.len().encode(w, s);
diff --git a/library/proc_macro/src/lib.rs b/library/proc_macro/src/lib.rs
index 6e645216c8dd2..5e1289ec79d30 100644
--- a/library/proc_macro/src/lib.rs
+++ b/library/proc_macro/src/lib.rs
@@ -235,7 +235,7 @@ impl From<TokenTree> for TokenStream {

 /// Non-generic helper for implementing `FromIterator<TokenTree>` and
 /// `Extend<TokenTree>` with less monomorphization in calling crates.
-struct ExtendStreamWithTreesHelper {
+struct ConcatTreesHelper {
     trees: Vec<
         bridge::TokenTree<
             bridge::client::Group,
@@ -246,9 +246,9 @@ struct ExtendStreamWithTreesHelper {
     >,
 }

-impl ExtendStreamWithTreesHelper {
+impl ConcatTreesHelper {
     fn new(capacity: usize) -> Self {
-        ExtendStreamWithTreesHelper { trees: Vec::with_capacity(capacity) }
+        ConcatTreesHelper { trees: Vec::with_capacity(capacity) }
     }

     fn push(&mut self, tree: TokenTree) {
@@ -263,7 +263,7 @@ impl ExtendStreamWithTreesHelper {
         }
     }

-    fn extend(self, stream: &mut TokenStream) {
+    fn append_to(self, stream: &mut TokenStream) {
         if self.trees.is_empty() {
             return;
         }
@@ -273,13 +273,13 @@ impl ExtendStreamWithTreesHelper {

 /// Non-generic helper for implementing `FromIterator<TokenStream>` and
 /// `Extend<TokenStream>` with less monomorphization in calling crates.
-struct ExtendStreamWithStreamsHelper {
+struct ConcatStreamsHelper {
     streams: Vec<bridge::client::TokenStream>,
 }

-impl ExtendStreamWithStreamsHelper {
+impl ConcatStreamsHelper {
     fn new(capacity: usize) -> Self {
-        ExtendStreamWithStreamsHelper { streams: Vec::with_capacity(capacity) }
+        ConcatStreamsHelper { streams: Vec::with_capacity(capacity) }
     }

     fn push(&mut self, stream: TokenStream) {
@@ -296,7 +296,7 @@ impl ExtendStreamWithStreamsHelper {
         }
     }

-    fn extend(mut self, stream: &mut TokenStream) {
+    fn append_to(mut self, stream: &mut TokenStream) {
         if self.streams.is_empty() {
             return;
         }
@@ -314,7 +314,7 @@ impl ExtendStreamWithStreamsHelper {
 impl iter::FromIterator<TokenTree> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
         let iter = trees.into_iter();
-        let mut builder = ExtendStreamWithTreesHelper::new(iter.size_hint().0);
+        let mut builder = ConcatTreesHelper::new(iter.size_hint().0);
         iter.for_each(|tree| builder.push(tree));
         builder.build()
     }
@@ -326,7 +326,7 @@ impl iter::FromIterator<TokenTree> for TokenStream {
 impl iter::FromIterator<TokenStream> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
         let iter = streams.into_iter();
-        let mut builder = ExtendStreamWithStreamsHelper::new(iter.size_hint().0);
+        let mut builder = ConcatStreamsHelper::new(iter.size_hint().0);
         iter.for_each(|stream| builder.push(stream));
         builder.build()
     }
@@ -336,9 +336,9 @@ impl iter::FromIterator<TokenStream> for TokenStream {
 impl Extend<TokenTree> for TokenStream {
     fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
         let iter = trees.into_iter();
-        let mut builder = ExtendStreamWithTreesHelper::new(iter.size_hint().0);
+        let mut builder = ConcatTreesHelper::new(iter.size_hint().0);
         iter.for_each(|tree| builder.push(tree));
-        builder.extend(self);
+        builder.append_to(self);
     }
 }
@@ -346,9 +346,9 @@ impl Extend<TokenTree> for TokenStream {
 impl Extend<TokenStream> for TokenStream {
     fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
         let iter = streams.into_iter();
-        let mut builder = ExtendStreamWithStreamsHelper::new(iter.size_hint().0);
+        let mut builder = ConcatStreamsHelper::new(iter.size_hint().0);
         iter.for_each(|stream| builder.push(stream));
-        builder.extend(self);
+        builder.append_to(self);
     }
 }
@@ -393,7 +393,7 @@ pub mod token_stream {
         type IntoIter = IntoIter;

         fn into_iter(self) -> IntoIter {
-            IntoIter(self.0.map(|v| v.into_iter()).unwrap_or_default().into_iter())
+            IntoIter(self.0.map(|v| v.into_trees()).unwrap_or_default().into_iter())
         }
     }
 }