Commit ef8162e

Author: bors-servo

Auto merge of #269 - servo:nll, r=SimonSapin
Clean up a bit now that lifetimes are non-lexical. I was doing some performance work, and some of these clone() calls appear in profiles. There's more to do, but I'd rather take out the low-hanging fruit first :)
2 parents: 65d6c3e + cfd59ca; commit: ef8162e
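
Every hunk in this commit has the same shape: match on the token the parser hands out by reference, and clone only the value that is actually kept (or the token needed to build an error), instead of cloning every token up front. Below is a minimal stand-alone sketch of that shape under non-lexical lifetimes; the Token, Parser, and ParseError types are toy stand-ins written for this note, not the cssparser API:

    // Toy stand-ins, not the cssparser API.
    #[derive(Clone, Debug, PartialEq)]
    enum Token {
        Ident(String),
        Number(i32),
    }

    #[derive(Debug, PartialEq)]
    struct ParseError {
        position: usize,
        unexpected: Option<Token>, // None = end of input
    }

    struct Parser {
        tokens: Vec<Token>,
        position: usize,
    }

    impl Parser {
        // Hands out a borrow of the next token, like cssparser's Parser::next().
        fn next(&mut self) -> Result<&Token, ParseError> {
            let position = self.position;
            let token = self.tokens.get(position).ok_or(ParseError {
                position,
                unexpected: None,
            })?;
            self.position += 1;
            Ok(token)
        }

        // Borrows the parser again to build an error, playing the role that
        // new_basic_unexpected_token_error plays in the real crate.
        fn unexpected(&self, token: Token) -> ParseError {
            ParseError {
                position: self.position,
                unexpected: Some(token),
            }
        }
    }

    // Before NLL, the borrow returned by next() was treated as live until the
    // end of the enclosing scope, so callers cloned every token up front:
    //     let token = parser.next()?.clone();
    //     match token { ... }
    // With non-lexical lifetimes the borrow ends at its last use, so the match
    // can work on the reference and clone only on the paths that need ownership.
    fn parse_ident(parser: &mut Parser) -> Result<String, ParseError> {
        match *parser.next()? {
            Token::Ident(ref name) => Ok(name.clone()), // clone only the value we keep
            ref token => {
                let token = token.clone(); // last use of the borrow of `parser`...
                Err(parser.unexpected(token)) // ...so the parser may be borrowed again here
            }
        }
    }

    fn main() {
        let mut parser = Parser {
            tokens: vec![Token::Ident("n".into()), Token::Number(3)],
            position: 0,
        };
        assert_eq!(parse_ident(&mut parser), Ok("n".to_string()));
        assert!(parse_ident(&mut parser).is_err());
    }

The same reasoning is what lets the real code below call new_basic_unexpected_token_error, another borrow of the parser, from inside a match arm that started by borrowing a token from it.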

File tree: 5 files changed, +89 -88 lines

src/color.rs (+4 -4)
@@ -280,22 +280,22 @@ impl Color {
     where
         ComponentParser: ColorComponentParser<'i>,
     {
-        // FIXME: remove clone() when lifetimes are non-lexical
         let location = input.current_source_location();
-        let token = input.next()?.clone();
-        match token {
+        let token = input.next()?;
+        match *token {
             Token::Hash(ref value) | Token::IDHash(ref value) => {
                 Color::parse_hash(value.as_bytes())
             }
             Token::Ident(ref value) => parse_color_keyword(&*value),
             Token::Function(ref name) => {
+                let name = name.clone();
                 return input.parse_nested_block(|arguments| {
                     parse_color_function(component_parser, &*name, arguments)
                 })
             }
             _ => Err(()),
         }
-        .map_err(|()| location.new_unexpected_token_error(token))
+        .map_err(|()| location.new_unexpected_token_error(token.clone()))
     }

     /// Parse a <color> value, per CSS Color Module Level 3.

src/nth.rs (+30 -17)
@@ -10,28 +10,30 @@ use matches::matches
 /// in which case the caller needs to check if the arguments’ parser is exhausted.
 /// Return `Ok((A, B))`, or `Err(())` for a syntax error.
 pub fn parse_nth<'i, 't>(input: &mut Parser<'i, 't>) -> Result<(i32, i32), BasicParseError<'i>> {
-    // FIXME: remove .clone() when lifetimes are non-lexical.
-    match input.next()?.clone() {
+    match *input.next()? {
         Token::Number {
             int_value: Some(b), ..
         } => Ok((0, b)),
         Token::Dimension {
             int_value: Some(a),
-            unit,
+            ref unit,
             ..
         } => {
            match_ignore_ascii_case! {
-                &unit,
+                unit,
                "n" => Ok(parse_b(input, a)?),
                "n-" => Ok(parse_signless_b(input, a, -1)?),
                _ => match parse_n_dash_digits(&*unit) {
                    Ok(b) => Ok((a, b)),
-                    Err(()) => Err(input.new_basic_unexpected_token_error(Token::Ident(unit.clone())))
+                    Err(()) => {
+                        let unit = unit.clone();
+                        Err(input.new_basic_unexpected_token_error(Token::Ident(unit)))
+                    }
                }
            }
        }
-        Token::Ident(value) => {
-            match_ignore_ascii_case! { &value,
+        Token::Ident(ref value) => {
+            match_ignore_ascii_case! { value,
                "even" => Ok((2, 0)),
                "odd" => Ok((2, 1)),
                "n" => Ok(parse_b(input, 1)?),
@@ -42,30 +44,41 @@ pub fn parse_nth<'i, 't>(input: &mut Parser<'i, 't>) -> Result<(i32, i32), BasicParseError<'i>> {
                let (slice, a) = if value.starts_with("-") {
                    (&value[1..], -1)
                } else {
-                    (&*value, 1)
+                    (&**value, 1)
                };
                match parse_n_dash_digits(slice) {
                    Ok(b) => Ok((a, b)),
-                    Err(()) => Err(input.new_basic_unexpected_token_error(Token::Ident(value.clone())))
+                    Err(()) => {
+                        let value = value.clone();
+                        Err(input.new_basic_unexpected_token_error(Token::Ident(value)))
+                    }
                }
            }
        }
    }
-        // FIXME: remove .clone() when lifetimes are non-lexical.
-        Token::Delim('+') => match input.next_including_whitespace()?.clone() {
-            Token::Ident(value) => {
-                match_ignore_ascii_case! { &value,
+        Token::Delim('+') => match *input.next_including_whitespace()? {
+            Token::Ident(ref value) => {
+                match_ignore_ascii_case! { value,
                    "n" => parse_b(input, 1),
                    "n-" => parse_signless_b(input, 1, -1),
-                    _ => match parse_n_dash_digits(&*value) {
+                    _ => match parse_n_dash_digits(value) {
                        Ok(b) => Ok((1, b)),
-                        Err(()) => Err(input.new_basic_unexpected_token_error(Token::Ident(value.clone())))
+                        Err(()) => {
+                            let value = value.clone();
+                            Err(input.new_basic_unexpected_token_error(Token::Ident(value)))
+                        }
                    }
                }
            }
-            token => Err(input.new_basic_unexpected_token_error(token)),
+            ref token => {
+                let token = token.clone();
+                Err(input.new_basic_unexpected_token_error(token))
+            },
+        },
+        ref token => {
+            let token = token.clone();
+            Err(input.new_basic_unexpected_token_error(token))
        },
-        token => Err(input.new_basic_unexpected_token_error(token)),
    }
}

src/parser.rs (+24 -27)
@@ -787,30 +787,30 @@ impl<'i: 't, 't> Parser<'i, 't> {
     /// Parse a <url-token> and return the unescaped value.
     #[inline]
     pub fn expect_url(&mut self) -> Result<CowRcStr<'i>, BasicParseError<'i>> {
-        // FIXME: revert early returns when lifetimes are non-lexical
         expect! {self,
-            Token::UnquotedUrl(ref value) => return Ok(value.clone()),
-            Token::Function(ref name) if name.eq_ignore_ascii_case("url") => {}
+            Token::UnquotedUrl(ref value) => Ok(value.clone()),
+            Token::Function(ref name) if name.eq_ignore_ascii_case("url") => {
+                self.parse_nested_block(|input| {
+                    input.expect_string().map_err(Into::into).map(|s| s.clone())
+                })
+                .map_err(ParseError::<()>::basic)
+            }
         }
-        self.parse_nested_block(|input| {
-            input.expect_string().map_err(Into::into).map(|s| s.clone())
-        })
-        .map_err(ParseError::<()>::basic)
     }

     /// Parse either a <url-token> or a <string-token>, and return the unescaped value.
     #[inline]
     pub fn expect_url_or_string(&mut self) -> Result<CowRcStr<'i>, BasicParseError<'i>> {
-        // FIXME: revert early returns when lifetimes are non-lexical
         expect! {self,
-            Token::UnquotedUrl(ref value) => return Ok(value.clone()),
-            Token::QuotedString(ref value) => return Ok(value.clone()),
-            Token::Function(ref name) if name.eq_ignore_ascii_case("url") => {}
+            Token::UnquotedUrl(ref value) => Ok(value.clone()),
+            Token::QuotedString(ref value) => Ok(value.clone()),
+            Token::Function(ref name) if name.eq_ignore_ascii_case("url") => {
+                self.parse_nested_block(|input| {
+                    input.expect_string().map_err(Into::into).map(|s| s.clone())
+                })
+                .map_err(ParseError::<()>::basic)
+            }
         }
-        self.parse_nested_block(|input| {
-            input.expect_string().map_err(Into::into).map(|s| s.clone())
-        })
-        .map_err(ParseError::<()>::basic)
     }

     /// Parse a <number-token> and return the integer value.
@@ -928,30 +928,27 @@ impl<'i: 't, 't> Parser<'i, 't> {
     /// See `Token::is_parse_error`. This also checks nested blocks and functions recursively.
     #[inline]
     pub fn expect_no_error_token(&mut self) -> Result<(), BasicParseError<'i>> {
-        // FIXME: remove break and intermediate variable when lifetimes are non-lexical
-        let token;
         loop {
             match self.next_including_whitespace_and_comments() {
                 Ok(&Token::Function(_))
                 | Ok(&Token::ParenthesisBlock)
                 | Ok(&Token::SquareBracketBlock)
-                | Ok(&Token::CurlyBracketBlock) => {}
+                | Ok(&Token::CurlyBracketBlock) => {
+                    self.parse_nested_block(|input| {
+                        input.expect_no_error_token().map_err(Into::into)
+                    }).map_err(ParseError::<()>::basic)?
+                }
                 Ok(t) => {
+                    // FIXME: maybe these should be separate variants of
+                    // BasicParseError instead?
                    if t.is_parse_error() {
-                        token = t.clone();
-                        break;
+                        let token = t.clone();
+                        return Err(self.new_basic_unexpected_token_error(token))
                    }
-                    continue;
                }
                 Err(_) => return Ok(()),
             }
-            let result = self.parse_nested_block(|input| {
-                input.expect_no_error_token().map_err(|e| Into::into(e))
-            });
-            result.map_err(ParseError::<()>::basic)?
         }
-        // FIXME: maybe these should be separate variants of BasicParseError instead?
-        Err(self.new_basic_unexpected_token_error(token))
     }
 }

src/rules_and_declarations.rs (+26 -37)
@@ -266,17 +266,10 @@ where
     fn next(&mut self) -> Option<Self::Item> {
         loop {
             let start = self.input.state();
-            // FIXME: remove intermediate variable when lifetimes are non-lexical
-            let ident = match self.input.next_including_whitespace_and_comments() {
+            match self.input.next_including_whitespace_and_comments() {
                 Ok(&Token::WhiteSpace(_)) | Ok(&Token::Comment(_)) | Ok(&Token::Semicolon) => continue,
-                Ok(&Token::Ident(ref name)) => Ok(Ok(name.clone())),
-                Ok(&Token::AtKeyword(ref name)) => Ok(Err(name.clone())),
-                Ok(token) => Err(token.clone()),
-                Err(_) => return None,
-            };
-            match ident {
-                Ok(Ok(name)) => {
-                    // Ident
+                Ok(&Token::Ident(ref name)) => {
+                    let name = name.clone();
                     let result = {
                         let parser = &mut self.parser;
                         // FIXME: https://github.com/servo/rust-cssparser/issues/254
@@ -288,18 +281,20 @@ where
                     };
                     return Some(result.map_err(|e| (e, self.input.slice_from(start.position()))));
                 }
-                Ok(Err(name)) => {
-                    // At-keyword
+                Ok(&Token::AtKeyword(ref name)) => {
+                    let name = name.clone();
                     return Some(parse_at_rule(&start, name, self.input, &mut self.parser));
                 }
-                Err(token) => {
+                Ok(token) => {
+                    let token = token.clone();
                     let result = self.input.parse_until_after(Delimiter::Semicolon, |_| {
                         Err(start
                             .source_location()
-                            .new_unexpected_token_error(token.clone()))
+                            .new_unexpected_token_error(token))
                     });
                     return Some(result.map_err(|e| (e, self.input.slice_from(start.position()))));
                 }
+                Err(..) => return None,
             }
         }
     }
@@ -374,21 +369,18 @@ where
         }
         let start = self.input.state();

-        let at_keyword;
-        match self.input.next_byte() {
-            Some(b'@') => {
+        let at_keyword = match self.input.next_byte()? {
+            b'@' => {
                 match self.input.next_including_whitespace_and_comments() {
-                    Ok(&Token::AtKeyword(ref name)) => at_keyword = Some(name.clone()),
-                    _ => at_keyword = None,
-                }
-                // FIXME: move this back inside `match` when lifetimes are non-lexical
-                if at_keyword.is_none() {
-                    self.input.reset(&start)
+                    Ok(&Token::AtKeyword(ref name)) => Some(name.clone()),
+                    _ => {
+                        self.input.reset(&start);
+                        None
+                    },
                 }
             }
-            Some(_) => at_keyword = None,
-            None => return None,
-        }
+            _ => None,
+        };

         if let Some(name) = at_keyword {
             let first_stylesheet_rule = self.is_stylesheet && !self.any_rule_so_far;
@@ -444,20 +436,17 @@ where
         input.parse_entirely(|input| {
             input.skip_whitespace();
             let start = input.state();
-
-            let at_keyword;
-            if input.next_byte() == Some(b'@') {
+            let at_keyword = if input.next_byte() == Some(b'@') {
                 match *input.next_including_whitespace_and_comments()? {
-                    Token::AtKeyword(ref name) => at_keyword = Some(name.clone()),
-                    _ => at_keyword = None,
-                }
-                // FIXME: move this back inside `match` when lifetimes are non-lexical
-                if at_keyword.is_none() {
-                    input.reset(&start)
+                    Token::AtKeyword(ref name) => Some(name.clone()),
+                    _ => {
+                        input.reset(&start);
+                        None
+                    }
                 }
             } else {
-                at_keyword = None
-            }
+                None
+            };

             if let Some(name) = at_keyword {
                 parse_at_rule(&start, name, input, parser).map_err(|e| e.0)
src/unicode_range.rs

+5-3
Original file line numberDiff line numberDiff line change
@@ -60,11 +60,13 @@ impl UnicodeRange {
6060
fn parse_tokens<'i, 't>(input: &mut Parser<'i, 't>) -> Result<(), BasicParseError<'i>> {
6161
match input.next_including_whitespace()?.clone() {
6262
Token::Delim('+') => {
63-
// FIXME: remove .clone() when lifetimes are non-lexical.
64-
match input.next_including_whitespace()?.clone() {
63+
match *input.next_including_whitespace()? {
6564
Token::Ident(_) => {}
6665
Token::Delim('?') => {}
67-
t => return Err(input.new_basic_unexpected_token_error(t)),
66+
ref t => {
67+
let t = t.clone();
68+
return Err(input.new_basic_unexpected_token_error(t));
69+
}
6870
}
6971
parse_question_marks(input)
7072
}
