Commit 0fe6aae

buffer errors from initial tokenization when parsing
1 parent 8a3b5e9 · commit 0fe6aae
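
In short, this commit gives the parser-construction entry points fallible variants: where the old functions could hit a `FatalError` (and abort the process) if the very first token failed to lex, the new `maybe_*` functions hand the lexing errors back to the caller as buffered `Diagnostic`s. A minimal, self-contained sketch of that pattern, using made-up `Diagnostic` and `Parser` stand-ins rather than the real libsyntax types:

```rust
// Standalone sketch of the buffering pattern this commit introduces. The
// `Diagnostic` and `Parser` types here are stand-ins, not the libsyntax ones:
// the point is only that tokenization errors are collected and returned to the
// caller instead of being fatally reported on the spot.
struct Diagnostic {
    msg: String,
}

struct Parser {
    tokens: Vec<String>,
}

/// Fallible constructor: errors found while producing the initial token
/// stream are buffered and handed back rather than reported immediately.
fn maybe_new_parser(source: &str) -> Result<Parser, Vec<Diagnostic>> {
    let mut errors = Vec::new();
    let mut tokens = Vec::new();
    for word in source.split_whitespace() {
        if word.chars().any(|c| c.is_control()) {
            errors.push(Diagnostic { msg: format!("invalid token: {:?}", word) });
        } else {
            tokens.push(word.to_string());
        }
    }
    if errors.is_empty() { Ok(Parser { tokens }) } else { Err(errors) }
}

fn main() {
    match maybe_new_parser("fn main ( ) { }") {
        Ok(parser) => println!("parsed {} tokens", parser.tokens.len()),
        // The caller now decides what to do with the buffered errors:
        // report them, count them, or (as rustdoc does below) cancel them.
        Err(errs) => {
            for err in errs {
                eprintln!("buffered error: {}", err.msg);
            }
        }
    }
}
```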

3 files changed (+79, -7)

src/librustdoc/test.rs (+11, -4)
@@ -422,13 +422,20 @@ pub fn make_test(s: &str,
 
     debug!("about to parse: \n{}", source);
 
-    // FIXME(misdreavus): this can still emit a FatalError (and thus halt rustdoc prematurely)
-    // if there is a lexing error in the first token
-    let mut parser = parse::new_parser_from_source_str(&sess, filename, source);
-
     let mut found_main = false;
     let mut found_extern_crate = cratename.is_none();
 
+    let mut parser = match parse::maybe_new_parser_from_source_str(&sess, filename, source) {
+        Ok(p) => p,
+        Err(errs) => {
+            for mut err in errs {
+                err.cancel();
+            }
+
+            return (found_main, found_extern_crate);
+        }
+    };
+
     loop {
         match parser.parse_item() {
             Ok(Some(item)) => {
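
For a sense of what this guards against: per the removed FIXME, a lexing error in the first token of a doctest could raise a `FatalError` and halt rustdoc prematurely while it was merely probing the snippet for `fn main` and `extern crate`. A hypothetical doctest of that shape (an unterminated block comment is one way to trigger a fatal lexer error on the first token):

````rust
/// With the old code path, rustdoc itself could abort while probing this
/// doctest; with this change the lexing errors are cancelled, `make_test`
/// falls back to its defaults, and the bad snippet is left to fail on its
/// own when the doctest is actually compiled.
///
/// ```
/// /* an unterminated block comment is a fatal lexer error
/// ```
fn documented() {}
````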

src/libsyntax/parse/lexer/mod.rs (+22, -1)
@@ -11,7 +11,7 @@
 use ast::{self, Ident};
 use syntax_pos::{self, BytePos, CharPos, Pos, Span, NO_EXPANSION};
 use source_map::{SourceMap, FilePathMapping};
-use errors::{Applicability, FatalError, DiagnosticBuilder};
+use errors::{Applicability, FatalError, Diagnostic, DiagnosticBuilder};
 use parse::{token, ParseSess};
 use str::char_at;
 use symbol::{Symbol, keywords};
@@ -175,6 +175,16 @@ impl<'a> StringReader<'a> {
         self.fatal_errs.clear();
     }
 
+    pub fn buffer_fatal_errors(&mut self) -> Vec<Diagnostic> {
+        let mut buffer = Vec::new();
+
+        for err in self.fatal_errs.drain(..) {
+            err.buffer(&mut buffer);
+        }
+
+        buffer
+    }
+
     pub fn peek(&self) -> TokenAndSpan {
         // FIXME(pcwalton): Bad copy!
         TokenAndSpan {
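
The context above (presumably the tail of the existing `emit_fatal_errors`) highlights the contrast: the existing method reports the reader's pending fatal errors through the session handler right away, while the new `buffer_fatal_errors` drains them into plain `Diagnostic` values via `DiagnosticBuilder::buffer`, deferring the decision to the caller. A hedged sketch of that choice, written as if inside libsyntax (`drain_fatal_errors` is a hypothetical helper, not part of this commit):

```rust
// Hypothetical helper (not in this commit): dispose of a StringReader's
// pending fatal errors either by reporting them now or by buffering them.
fn drain_fatal_errors(reader: &mut StringReader, report_now: bool) -> Vec<Diagnostic> {
    if report_now {
        // Existing behaviour: emit through the handler and clear the queue.
        reader.emit_fatal_errors();
        Vec::new()
    } else {
        // New behaviour: convert each pending DiagnosticBuilder into a
        // Diagnostic and hand the batch back to the caller.
        reader.buffer_fatal_errors()
    }
}
```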
@@ -251,6 +261,17 @@ impl<'a> StringReader<'a> {
         Ok(sr)
     }
 
+    pub fn new_or_buffered_errs(sess: &'a ParseSess,
+                                source_file: Lrc<syntax_pos::SourceFile>,
+                                override_span: Option<Span>) -> Result<Self, Vec<Diagnostic>> {
+        let mut sr = StringReader::new_raw(sess, source_file, override_span);
+        if sr.advance_token().is_err() {
+            Err(sr.buffer_fatal_errors())
+        } else {
+            Ok(sr)
+        }
+    }
+
     pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self {
         let begin = sess.source_map().lookup_byte_offset(span.lo());
         let end = sess.source_map().lookup_byte_offset(span.hi());

src/libsyntax/parse/mod.rs (+46, -2)
@@ -15,7 +15,7 @@ use ast::{self, CrateConfig, NodeId};
 use early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
 use source_map::{SourceMap, FilePathMapping};
 use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
-use errors::{Handler, ColorConfig, DiagnosticBuilder};
+use errors::{Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
 use feature_gate::UnstableFeatures;
 use parse::parser::Parser;
 use ptr::P;
@@ -174,14 +174,25 @@ pub fn parse_stream_from_source_str(name: FileName, source: String, sess: &Parse
     source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
 }
 
-// Create a new parser from a source string
+/// Create a new parser from a source string
 pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
                                       -> Parser {
     let mut parser = source_file_to_parser(sess, sess.source_map().new_source_file(name, source));
     parser.recurse_into_file_modules = false;
     parser
 }
 
+/// Create a new parser from a source string. Returns any buffered errors from lexing the initial
+/// token stream.
+pub fn maybe_new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
+    -> Result<Parser, Vec<Diagnostic>>
+{
+    let mut parser = maybe_source_file_to_parser(sess,
+                                                 sess.source_map().new_source_file(name, source))?;
+    parser.recurse_into_file_modules = false;
+    Ok(parser)
+}
+
 /// Create a new parser, handling errors as appropriate
 /// if the file doesn't exist
 pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path) -> Parser<'a> {
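
With the fallible constructor in place, a tool living in the compiler tree can probe arbitrary strings without risking an abort. A hedged sketch of such a caller (`parses_cleanly` is a hypothetical helper, not part of this commit, and assumes the usual `use syntax::parse::{self, ParseSess};` style imports are in scope):

```rust
// Hypothetical caller (not in this commit): check whether a string parses as
// a crate, treating both lexing and parsing failures as a plain `false`.
fn parses_cleanly(sess: &ParseSess, name: FileName, source: String) -> bool {
    match parse::maybe_new_parser_from_source_str(sess, name, source) {
        // Lexing the first token succeeded; try to parse a whole crate.
        Ok(mut parser) => match parser.parse_crate_mod() {
            Ok(_) => true,
            Err(mut err) => {
                err.cancel();
                false
            }
        },
        // Lexing failed before a parser could even be built; the errors come
        // back buffered, and we drop them instead of reporting them.
        Err(errs) => {
            for mut err in errs {
                err.cancel();
            }
            false
        }
    }
}
```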
@@ -214,6 +225,21 @@ fn source_file_to_parser(sess: & ParseSess, source_file: Lrc<SourceFile>) -> Par
     parser
 }
 
+/// Given a source_file and config, return a parser. Returns any buffered errors from lexing the
+/// initial token stream.
+fn maybe_source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>)
+    -> Result<Parser, Vec<Diagnostic>>
+{
+    let end_pos = source_file.end_pos;
+    let mut parser = stream_to_parser(sess, maybe_file_to_stream(sess, source_file, None)?);
+
+    if parser.token == token::Eof && parser.span.is_dummy() {
+        parser.span = Span::new(end_pos, end_pos, parser.span.ctxt());
+    }
+
+    Ok(parser)
+}
+
 // must preserve old name for now, because quote! from the *existing*
 // compiler expands into it
 pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser {
@@ -248,6 +274,24 @@ pub fn source_file_to_stream(sess: &ParseSess,
     panictry!(srdr.parse_all_token_trees())
 }
 
+/// Given a source file, produce a sequence of token-trees. Returns any buffered errors from
+/// parsing the token stream.
+pub fn maybe_file_to_stream(sess: &ParseSess,
+                            source_file: Lrc<SourceFile>,
+                            override_span: Option<Span>) -> Result<TokenStream, Vec<Diagnostic>> {
+    let mut srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
+    srdr.real_token();
+
+    match srdr.parse_all_token_trees() {
+        Ok(stream) => Ok(stream),
+        Err(err) => {
+            let mut buffer = Vec::with_capacity(1);
+            err.buffer(&mut buffer);
+            Err(buffer)
+        }
+    }
+}
+
 /// Given stream and the `ParseSess`, produce a parser
 pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser {
     Parser::new(sess, stream, None, true, false)
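
A caller that wants the buffered errors reported, rather than cancelled, needs a way to turn a `Diagnostic` back into something emittable. A hedged sketch, written as if inside `syntax::parse`; it assumes `DiagnosticBuilder::new_diagnostic(handler, diagnostic)` is available for re-arming a buffered `Diagnostic` against the session's handler, and both helpers below are hypothetical, not part of this commit:

```rust
// Hypothetical helper (not in this commit): report a batch of buffered
// diagnostics through the parse session's handler.
// Assumes DiagnosticBuilder::new_diagnostic(handler, diagnostic) exists here.
fn emit_buffered(sess: &ParseSess, diagnostics: Vec<Diagnostic>) {
    for diagnostic in diagnostics {
        DiagnosticBuilder::new_diagnostic(&sess.span_diagnostic, diagnostic).emit();
    }
}

// Hypothetical usage: tokenize a file, emitting any buffered tokenization
// errors instead of silently dropping them.
fn file_to_stream_or_report(sess: &ParseSess,
                            source_file: Lrc<SourceFile>) -> Option<TokenStream> {
    match maybe_file_to_stream(sess, source_file, None) {
        Ok(stream) => Some(stream),
        Err(errs) => {
            emit_buffered(sess, errs);
            None
        }
    }
}
```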
