Replace nth_char(0) with next() in cursor.first()

and optimize the iterator returned by `tokenize()`.

This improves lexer performance by 35%.
Julian Wollersberger 2021-11-30 16:06:58 +01:00
parent 72d66064e7
commit 1f147a2ed7
2 changed files with 28 additions and 25 deletions
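
The hunks below cover only the `tokenize` side of the change; the `cursor.first()` rewrite named in the commit title lives in the cursor module, the second changed file, which is not shown here. As a rough sketch of that half, assuming `Cursor` wraps a `std::str::Chars` iterator and an `EOF_CHAR` sentinel (the field layout and constant below are reconstructions for illustration, not the committed code):

    use std::str::Chars;

    const EOF_CHAR: char = '\0';

    struct Cursor<'a> {
        chars: Chars<'a>,
    }

    impl Cursor<'_> {
        /// Peeks the next symbol without consuming it.
        /// Old: `self.nth_char(0)`, i.e. `self.chars.clone().nth(0)`.
        /// New: `next()` on a clone of the iterator, which the commit
        /// found to optimize better than the `nth(0)` path.
        fn first(&self) -> char {
            self.chars.clone().next().unwrap_or(EOF_CHAR)
        }
    }

    fn main() {
        let cursor = Cursor { chars: "fn".chars() };
        assert_eq!(cursor.first(), 'f');
    }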

@@ -225,14 +225,15 @@ pub fn first_token(input: &str) -> Token {
 }
 
 /// Creates an iterator that produces tokens from the input string.
-pub fn tokenize(mut input: &str) -> impl Iterator<Item = Token> + '_ {
+pub fn tokenize(input: &str) -> impl Iterator<Item = Token> + '_ {
+    let mut cursor = Cursor::new(input);
     std::iter::from_fn(move || {
-        if input.is_empty() {
-            return None;
-        }
-        let token = first_token(input);
-        input = &input[token.len..];
-        Some(token)
+        if cursor.is_eof() {
+            None
+        } else {
+            cursor.reset_len_consumed();
+            Some(cursor.advance_token())
+        }
     })
 }
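
Net effect of the hunk above: the old iterator re-sliced `input` and built a fresh `Cursor` inside `first_token` on every call, while the new one keeps a single `Cursor` alive across all tokens and only resets its consumed-length counter between them. A hedged usage sketch, assuming `Token::len` is the token's byte length as the deleted `input = &input[token.len..]` line implies:

    // Using the crate's own items from the diff above: every byte of
    // the input belongs to exactly one token, so the token lengths
    // must sum to the length of the source string.
    let source = "fn main() {}";
    let total: usize = tokenize(source).map(|token| token.len).sum();
    assert_eq!(total, source.len());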
@@ -808,11 +809,4 @@ impl Cursor<'_> {
         self.eat_while(is_id_continue);
     }
-
-    /// Eats symbols while predicate returns true or until the end of file is reached.
-    fn eat_while(&mut self, mut predicate: impl FnMut(char) -> bool) {
-        while predicate(self.first()) && !self.is_eof() {
-            self.bump();
-        }
-    }
 }
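
`eat_while` is deleted from this file but not from the crate: the file stats above show a second changed file receiving most of the additions, so the helper plausibly moved next to `Cursor` itself. A sketch of the relocated method under that assumption (visibility is a guess), with the loop logic unchanged from the removed lines:

    impl Cursor<'_> {
        /// Eats symbols while predicate returns true or until the end of file is reached.
        pub(crate) fn eat_while(&mut self, mut predicate: impl FnMut(char) -> bool) {
            // `first()` returns EOF_CHAR once the input is exhausted, so the
            // explicit `is_eof()` check stops a predicate that happens to
            // accept '\0' from spinning forever at the end of input.
            while predicate(self.first()) && !self.is_eof() {
                self.bump();
            }
        }
    }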