Skip to content

Commit b4b95f2

Browse files
committed
Adapt to newer nightly toolchain
1 parent e64c2ae commit b4b95f2

File tree

7 files changed

+62
-35
lines changed

7 files changed

+62
-35
lines changed

build.rs

+2-7
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,5 @@
1-
use std::convert::TryInto;
21
use std::env;
3-
use std::env::VarError;
42
use std::error::Error;
5-
use std::fs::{read_dir, DirEntry, File};
6-
use std::io::Write;
7-
use std::path::Path;
83
use std::process::Command;
94

105
fn main() {
@@ -33,14 +28,14 @@ fn gen_for_grammar(
3328
grammar_file_name: &str,
3429
antlr_path: &str,
3530
additional_arg: Option<&str>,
36-
) -> Result<(), Box<Error>> {
31+
) -> Result<(), Box<dyn Error>> {
3732
// let out_dir = env::var("OUT_DIR").unwrap();
3833
// let dest_path = Path::new(&out_dir);
3934

4035
let input = env::current_dir().unwrap().join("grammars");
4136
let file_name = grammar_file_name.to_owned() + ".g4";
4237

43-
let c = Command::new("java")
38+
Command::new("java")
4439
.current_dir(input)
4540
.arg("-cp")
4641
.arg(antlr_path)

src/atn.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ impl Debug for ATN {
5555
}
5656

5757
impl ATN {
58-
crate fn new_atn(grammar_type: ATNType, max_token_type: isize) -> ATN {
58+
pub(crate) fn new_atn(grammar_type: ATNType, max_token_type: isize) -> ATN {
5959
ATN {
6060
decision_to_state: Vec::new(),
6161
grammar_type,
@@ -93,7 +93,7 @@ impl ATN {
9393
analyzer.look::<Ctx>(s, None, _ctx)
9494
}
9595

96-
crate fn add_state(&mut self, state: Box<dyn ATNState>) {
96+
pub(crate) fn add_state(&mut self, state: Box<dyn ATNState>) {
9797
debug_assert_eq!(state.get_state_number(), self.states.len());
9898
self.states.push(state)
9999
}

src/atn_deserialization_options.rs

+1
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
#[allow(unused)]
12
#[derive(Debug)]
23
pub struct ATNDeserializationOptions {
34
read_only: bool,

src/errors.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -216,7 +216,7 @@ impl InputMisMatchError {
216216

217217
/// See `ANTLRError::PredicateError`
218218
#[derive(Debug, Clone)]
219-
#[allow(missing_docs)]
219+
#[allow(missing_docs, unused)]
220220
pub struct FailedPredicateError {
221221
pub base: BaseRecognitionError,
222222
pub rule_index: isize,

src/lexer.rs

+56-22
Original file line numberDiff line numberDiff line change
@@ -110,9 +110,9 @@ pub struct BaseLexer<
110110
}
111111

112112
#[derive(Debug)]
113-
crate struct LexerPosition {
114-
crate line: Cell<isize>,
115-
crate char_position_in_line: Cell<isize>,
113+
pub(crate) struct LexerPosition {
114+
pub(crate) line: Cell<isize>,
115+
pub(crate) char_position_in_line: Cell<isize>,
116116
}
117117

118118
impl<'input, T, Input, TF> Deref for BaseLexer<'input, T, Input, TF>
@@ -123,7 +123,9 @@ where
123123
{
124124
type Target = T;
125125

126-
fn deref(&self) -> &Self::Target { &self.recog }
126+
fn deref(&self) -> &Self::Target {
127+
&self.recog
128+
}
127129
}
128130

129131
impl<'input, T, Input, TF> DerefMut for BaseLexer<'input, T, Input, TF>
@@ -132,7 +134,9 @@ where
132134
Input: CharStream<TF::From>,
133135
TF: TokenFactory<'input>,
134136
{
135-
fn deref_mut(&mut self) -> &mut Self::Target { &mut self.recog }
137+
fn deref_mut(&mut self) -> &mut Self::Target {
138+
&mut self.recog
139+
}
136140
}
137141

138142
impl<'input, T, Input, TF> Recognizer<'input> for BaseLexer<'input, T, Input, TF>
@@ -178,16 +182,18 @@ pub use super::token::TOKEN_DEFAULT_CHANNEL as LEXER_DEFAULT_TOKEN_CHANNEL;
178182
#[doc(inline)]
179183
pub use super::token::TOKEN_HIDDEN_CHANNEL as LEXER_HIDDEN;
180184

181-
crate const LEXER_MIN_CHAR_VALUE: isize = 0x0000;
182-
crate const LEXER_MAX_CHAR_VALUE: isize = 0x10FFFF;
185+
pub(crate) const LEXER_MIN_CHAR_VALUE: isize = 0x0000;
186+
pub(crate) const LEXER_MAX_CHAR_VALUE: isize = 0x10FFFF;
183187

184188
impl<'input, T, Input, TF> BaseLexer<'input, T, Input, TF>
185189
where
186190
T: LexerRecog<'input, Self> + 'static,
187191
Input: CharStream<TF::From>,
188192
TF: TokenFactory<'input>,
189193
{
190-
fn emit_token(&mut self, token: TF::Tok) { self.token = Some(token); }
194+
fn emit_token(&mut self, token: TF::Tok) {
195+
self.token = Some(token);
196+
}
191197

192198
fn emit(&mut self) {
193199
<T as LexerRecog<Self>>::before_emit(self);
@@ -220,7 +226,9 @@ where
220226
}
221227

222228
/// Current position in input stream
223-
pub fn get_char_index(&self) -> isize { self.input.as_ref().unwrap().index() }
229+
pub fn get_char_index(&self) -> isize {
230+
self.input.as_ref().unwrap().index()
231+
}
224232

225233
/// Current token text
226234
pub fn get_text<'a>(&'a self) -> Cow<'a, TF::Data>
@@ -242,7 +250,9 @@ where
242250
}
243251

244252
/// Used from lexer actions to override text of the token that will be emitted next
245-
pub fn set_text(&mut self, _text: <TF::Data as ToOwned>::Owned) { self.text = Some(_text); }
253+
pub fn set_text(&mut self, _text: <TF::Data as ToOwned>::Owned) {
254+
self.text = Some(_text);
255+
}
246256

247257
// fn get_all_tokens(&mut self) -> Vec<TF::Tok> { unimplemented!() }
248258

@@ -254,7 +264,9 @@ where
254264
}
255265

256266
/// Remove and drop all error listeners
257-
pub fn remove_error_listeners(&mut self) { self.error_listeners.borrow_mut().clear(); }
267+
pub fn remove_error_listeners(&mut self) {
268+
self.error_listeners.borrow_mut().clear();
269+
}
258270

259271
/// Creates new lexer instance
260272
pub fn new_base_lexer(
@@ -375,9 +387,13 @@ where
375387
self.token.take().unwrap()
376388
}
377389

378-
fn get_line(&self) -> isize { self.current_pos.line.get() }
390+
fn get_line(&self) -> isize {
391+
self.current_pos.line.get()
392+
}
379393

380-
fn get_char_position_in_line(&self) -> isize { self.current_pos.char_position_in_line.get() }
394+
fn get_char_position_in_line(&self) -> isize {
395+
self.current_pos.char_position_in_line.get()
396+
}
381397

382398
fn get_input_stream(&mut self) -> Option<&mut dyn IntStream> {
383399
match &mut self.input {
@@ -397,7 +413,9 @@ where
397413
// self.factory = f;
398414
// }
399415

400-
fn get_token_factory(&self) -> &'input TF { self.factory }
416+
fn get_token_factory(&self) -> &'input TF {
417+
self.factory
418+
}
401419
}
402420

403421
#[cold]
@@ -440,9 +458,13 @@ where
440458
{
441459
type Input = Input;
442460

443-
fn input(&mut self) -> &mut Self::Input { self.input.as_mut().unwrap() }
461+
fn input(&mut self) -> &mut Self::Input {
462+
self.input.as_mut().unwrap()
463+
}
444464

445-
fn set_channel(&mut self, v: isize) { self.channel = v; }
465+
fn set_channel(&mut self, v: isize) {
466+
self.channel = v;
467+
}
446468

447469
fn push_mode(&mut self, m: usize) {
448470
self.mode_stack.push(self.mode);
@@ -456,15 +478,27 @@ where
456478
})
457479
}
458480

459-
fn set_type(&mut self, t: isize) { self.token_type = t; }
481+
fn set_type(&mut self, t: isize) {
482+
self.token_type = t;
483+
}
460484

461-
fn set_mode(&mut self, m: usize) { self.mode = m; }
485+
fn set_mode(&mut self, m: usize) {
486+
self.mode = m;
487+
}
462488

463-
fn more(&mut self) { self.set_type(LEXER_MORE) }
489+
fn more(&mut self) {
490+
self.set_type(LEXER_MORE)
491+
}
464492

465-
fn skip(&mut self) { self.set_type(LEXER_SKIP) }
493+
fn skip(&mut self) {
494+
self.set_type(LEXER_SKIP)
495+
}
466496

467-
fn reset(&mut self) { unimplemented!() }
497+
fn reset(&mut self) {
498+
unimplemented!()
499+
}
468500

469-
fn get_interpreter(&self) -> Option<&LexerATNSimulator> { self.interpreter.as_deref() }
501+
fn get_interpreter(&self) -> Option<&LexerATNSimulator> {
502+
self.interpreter.as_deref()
503+
}
470504
}

src/lib.rs

-2
Original file line numberDiff line numberDiff line change
@@ -8,11 +8,9 @@
88
#![feature(coerce_unsized)]
99
#![feature(associated_type_defaults)]
1010
#![feature(generic_associated_types)]
11-
#![feature(crate_visibility_modifier)]
1211
// #![feature(generic_associated_types)]
1312
#![warn(rust_2018_idioms)]
1413
#![warn(missing_docs)] // warn if there is missing docs
15-
#![warn(missing_debug_implementations)]
1614
#![warn(trivial_numeric_casts)]
1715
#![allow(incomplete_features)]
1816

src/token_factory.rs

-1
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,6 @@ use crate::token::Token;
1414
use crate::token::{CommonToken, OwningToken, TOKEN_INVALID_TYPE};
1515
use better_any::{Tid, TidAble};
1616

17-
#[allow(non_upper_case_globals)]
1817
lazy_static! {
1918
pub(crate) static ref COMMON_TOKEN_FACTORY_DEFAULT: Box<CommonTokenFactory> =
2019
Box::new(CommonTokenFactory {});

0 commit comments

Comments (0)