Commit f5f24a9

Merge #8977
8977: internal: minor `TokenMap` cleanups r=jonas-schievink a=jonas-schievink

bors r+

Co-authored-by: Jonas Schievink <[email protected]>

Parents: 1ebb53e + c8f40b1

7 files changed, 101 insertions(+), 90 deletions(-)
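
The change common to the callers below is mechanical: the old two-step lookup `range_by_token(id)?.by_kind(kind)?` collapses into a single `range_by_token(id, kind)?` call, and `TokenTextRange` becomes an implementation detail of the new `token_map` module. A minimal sketch of the new call shape, assuming the workspace crate names used here (`mbe`, `tt`, `parser`, `syntax`); the helper function itself is hypothetical and not part of this commit:

    use parser::SyntaxKind;
    use syntax::TextRange;

    // Hypothetical helper (not part of this commit) showing the new one-call API:
    // TokenMap resolves the SyntaxKind internally and returns a TextRange directly.
    fn relative_range_of(map: &mbe::TokenMap, id: tt::TokenId, kind: SyntaxKind) -> Option<TextRange> {
        // Before this commit: map.range_by_token(id)?.by_kind(kind)
        map.range_by_token(id, kind)
    }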

crates/hir_expand/src/db.rs

Lines changed: 1 addition & 1 deletion
@@ -155,7 +155,7 @@ pub fn expand_speculative(
         mbe::token_tree_to_syntax_node(&speculative_expansion.value, fragment_kind).ok()?;

     let token_id = macro_def.map_id_down(token_id);
-    let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?;
+    let range = tmap_2.range_by_token(token_id, token_to_map.kind())?;
     let token = node.syntax_node().covering_element(range).into_token()?;
     Some((node.syntax_node(), token))
 }

crates/hir_expand/src/hygiene.rs

Lines changed: 1 addition & 1 deletion
@@ -154,7 +154,7 @@ impl HygieneInfo {
             },
         };

-        let range = token_map.range_by_token(token_id)?.by_kind(SyntaxKind::IDENT)?;
+        let range = token_map.range_by_token(token_id, SyntaxKind::IDENT)?;
         Some((tt.with_value(range + tt.value), origin))
     }
 }

crates/hir_expand/src/lib.rs

Lines changed: 2 additions & 2 deletions
@@ -329,7 +329,7 @@ impl ExpansionInfo {
         let token_id = self.macro_arg.1.token_by_range(range)?;
         let token_id = self.macro_def.map_id_down(token_id);

-        let range = self.exp_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
+        let range = self.exp_map.range_by_token(token_id, token.value.kind())?;

         let token = self.expanded.value.covering_element(range).into_token()?;

@@ -354,7 +354,7 @@ impl ExpansionInfo {
             },
         };

-        let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
+        let range = token_map.range_by_token(token_id, token.value.kind())?;
         let token =
             tt.value.covering_element(range + tt.value.text_range().start()).into_token()?;
         Some((tt.with_value(token), origin))

crates/mbe/src/lib.rs

Lines changed: 7 additions & 3 deletions
@@ -14,6 +14,7 @@ mod tests;

 #[cfg(test)]
 mod benchmark;
+mod token_map;

 use std::fmt;

@@ -63,9 +64,12 @@ impl fmt::Display for ExpandError {
     }
 }

-pub use crate::syntax_bridge::{
-    ast_to_token_tree, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
-    token_tree_to_syntax_node, TokenMap,
+pub use crate::{
+    syntax_bridge::{
+        ast_to_token_tree, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
+        token_tree_to_syntax_node,
+    },
+    token_map::TokenMap,
 };

 /// This struct contains AST for a single `macro_rules` definition. What might
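
Note that the re-export above keeps the public path stable: `TokenMap` now lives in `mbe::token_map`, but dependent crates such as `hir_expand` keep importing it from the crate root, which is why none of their `use` declarations change in this commit. For illustration:

    // Downstream import, unchanged by this commit: the crate-root re-export
    // still exposes the type even though its defining module moved.
    use mbe::TokenMap;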

crates/mbe/src/syntax_bridge.rs

Lines changed: 3 additions & 80 deletions
@@ -10,36 +10,8 @@ use syntax::{
 };
 use tt::buffer::{Cursor, TokenBuffer};

-use crate::ExpandError;
 use crate::{subtree_source::SubtreeTokenSource, tt_iter::TtIter};
-
-#[derive(Debug, PartialEq, Eq, Clone, Copy)]
-pub enum TokenTextRange {
-    Token(TextRange),
-    Delimiter(TextRange),
-}
-
-impl TokenTextRange {
-    pub fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
-        match self {
-            TokenTextRange::Token(it) => Some(it),
-            TokenTextRange::Delimiter(it) => match kind {
-                T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())),
-                T!['}'] | T![')'] | T![']'] => {
-                    Some(TextRange::at(it.end() - TextSize::of('}'), 1.into()))
-                }
-                _ => None,
-            },
-        }
-    }
-}
-
-/// Maps `tt::TokenId` to the relative range of the original token.
-#[derive(Debug, PartialEq, Eq, Clone, Default)]
-pub struct TokenMap {
-    /// Maps `tt::TokenId` to the *relative* source range.
-    entries: Vec<(tt::TokenId, TokenTextRange)>,
-}
+use crate::{ExpandError, TokenMap};

 /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
 /// will consume).
@@ -53,7 +25,7 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
     let global_offset = node.text_range().start();
     let mut c = Convertor::new(node, global_offset);
     let subtree = c.go();
-    c.id_alloc.map.entries.shrink_to_fit();
+    c.id_alloc.map.shrink_to_fit();
     (subtree, c.id_alloc.map)
 }

@@ -149,55 +121,6 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
     res
 }

-impl TokenMap {
-    pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
-        let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
-            TokenTextRange::Token(it) => *it == relative_range,
-            TokenTextRange::Delimiter(it) => {
-                let open = TextRange::at(it.start(), 1.into());
-                let close = TextRange::at(it.end() - TextSize::of('}'), 1.into());
-                open == relative_range || close == relative_range
-            }
-        })?;
-        Some(token_id)
-    }
-
-    pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TokenTextRange> {
-        let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
-        Some(range)
-    }
-
-    fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
-        self.entries.push((token_id, TokenTextRange::Token(relative_range)));
-    }
-
-    fn insert_delim(
-        &mut self,
-        token_id: tt::TokenId,
-        open_relative_range: TextRange,
-        close_relative_range: TextRange,
-    ) -> usize {
-        let res = self.entries.len();
-        let cover = open_relative_range.cover(close_relative_range);
-
-        self.entries.push((token_id, TokenTextRange::Delimiter(cover)));
-        res
-    }
-
-    fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
-        let (_, token_text_range) = &mut self.entries[idx];
-        if let TokenTextRange::Delimiter(dim) = token_text_range {
-            let cover = dim.cover(close_relative_range);
-            *token_text_range = TokenTextRange::Delimiter(cover);
-        }
-    }
-
-    fn remove_delim(&mut self, idx: usize) {
-        // FIXME: This could be accidentally quadratic
-        self.entries.remove(idx);
-    }
-}
-
 /// Returns the textual content of a doc comment block as a quoted string
 /// That is, strips leading `///` (or `/**`, etc)
 /// and strips the ending `*/`
@@ -634,7 +557,7 @@ impl<'a> TtTreeSink<'a> {
     }

     fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap) {
-        self.token_map.entries.shrink_to_fit();
+        self.token_map.shrink_to_fit();
         (self.inner.finish(), self.token_map)
     }
 }

crates/mbe/src/tests/expand.rs

Lines changed: 2 additions & 3 deletions
@@ -58,9 +58,8 @@ macro_rules! foobar {
     let (node, token_map) = token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap();
     let content = node.syntax_node().to_string();

-    let get_text = |id, kind| -> String {
-        content[token_map.range_by_token(id).unwrap().by_kind(kind).unwrap()].to_string()
-    };
+    let get_text =
+        |id, kind| -> String { content[token_map.range_by_token(id, kind).unwrap()].to_string() };

     assert_eq!(expanded.token_trees.len(), 4);
     // {($e:ident) => { fn $e() {} }}

crates/mbe/src/token_map.rs

Lines changed: 85 additions & 0 deletions
@@ -0,0 +1,85 @@
+//! Mapping between `TokenId`s and the token's position in macro definitions or inputs.
+
+use parser::{SyntaxKind, T};
+use syntax::{TextRange, TextSize};
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+enum TokenTextRange {
+    Token(TextRange),
+    Delimiter(TextRange),
+}
+
+impl TokenTextRange {
+    fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
+        match self {
+            TokenTextRange::Token(it) => Some(it),
+            TokenTextRange::Delimiter(it) => match kind {
+                T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())),
+                T!['}'] | T![')'] | T![']'] => {
+                    Some(TextRange::at(it.end() - TextSize::of('}'), 1.into()))
+                }
+                _ => None,
+            },
+        }
+    }
+}
+
+/// Maps `tt::TokenId` to the relative range of the original token.
+#[derive(Debug, PartialEq, Eq, Clone, Default)]
+pub struct TokenMap {
+    /// Maps `tt::TokenId` to the *relative* source range.
+    entries: Vec<(tt::TokenId, TokenTextRange)>,
+}
+
+impl TokenMap {
+    pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
+        let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
+            TokenTextRange::Token(it) => *it == relative_range,
+            TokenTextRange::Delimiter(it) => {
+                let open = TextRange::at(it.start(), 1.into());
+                let close = TextRange::at(it.end() - TextSize::of('}'), 1.into());
+                open == relative_range || close == relative_range
+            }
+        })?;
+        Some(token_id)
+    }
+
+    pub fn range_by_token(&self, token_id: tt::TokenId, kind: SyntaxKind) -> Option<TextRange> {
+        let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
+        range.by_kind(kind)
+    }
+
+    pub(crate) fn shrink_to_fit(&mut self) {
+        self.entries.shrink_to_fit();
+    }
+
+    pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
+        self.entries.push((token_id, TokenTextRange::Token(relative_range)));
+    }
+
+    pub(crate) fn insert_delim(
+        &mut self,
+        token_id: tt::TokenId,
+        open_relative_range: TextRange,
+        close_relative_range: TextRange,
+    ) -> usize {
+        let res = self.entries.len();
+        let cover = open_relative_range.cover(close_relative_range);
+
+        self.entries.push((token_id, TokenTextRange::Delimiter(cover)));
+        res
+    }
+
+    pub(crate) fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
+        let (_, token_text_range) = &mut self.entries[idx];
+        if let TokenTextRange::Delimiter(dim) = token_text_range {
+            let cover = dim.cover(close_relative_range);
+            *token_text_range = TokenTextRange::Delimiter(cover);
+        }
+    }
+
+    pub(crate) fn remove_delim(&mut self, idx: usize) {
+        // FIXME: This could be accidentally quadratic
+        self.entries.remove(idx);
+    }
+}
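
The `Delimiter` arm above is why `range_by_token` takes a `SyntaxKind`: a single `TokenId` covers a whole delimiter pair, and the requested kind selects which end of the covered range comes back. A small illustrative test of that arithmetic with made-up offsets (not part of the commit):

    use syntax::{TextRange, TextSize};

    #[test]
    fn delimiter_range_picks_one_end() {
        // Suppose one TokenId covers a `{ ... }` pair at relative offsets 10..25.
        let cover = TextRange::new(TextSize::from(10), TextSize::from(25));

        // For an opening kind, by_kind returns the first byte of the covered range...
        let open = TextRange::at(cover.start(), 1.into());
        // ...and for a closing kind, the last byte.
        let close = TextRange::at(cover.end() - TextSize::of('}'), 1.into());

        assert_eq!(open, TextRange::new(10.into(), 11.into()));
        assert_eq!(close, TextRange::new(24.into(), 25.into()));
    }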
