Remove `Lexer`'s dependency on `Parser`.
Lexing precedes parsing, as you'd expect: `Lexer` creates a `TokenStream` and `Parser` then parses that `TokenStream`. But, in a horrendous violation of layering abstractions and common sense, `Lexer` depends on `Parser`! The `Lexer::unclosed_delim_err` method does some error recovery that relies on creating a `Parser` to do some post-processing of the `TokenStream` that the `Lexer` just created.

This commit just removes `unclosed_delim_err`. This change removes `Lexer`'s dependency on `Parser`, and also means that `lex_token_trees`'s return value can have a more typical form. The cost is slightly worse error messages in two obscure cases, as shown in these tests:

- tests/ui/parser/brace-in-let-chain.rs: there is slightly less explanation in this case involving an extra `{`.
- tests/ui/parser/diff-markers/unclosed-delims{,-in-macro}.rs: the diff marker detection is no longer supported (because that detection is implemented in the parser).

In my opinion this cost is outweighed by the magnitude of the code cleanup.
parent: a94fce97e3
commit: 2e412fef75
8 changed files with 67 additions and 173 deletions
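For context, here is a minimal, self-contained sketch of the "more typical form" the message refers to. The types and function names below are illustrative stand-ins, not rustc's real signatures: before the change, `lex_token_trees` handed back the stream alongside a separate error result; after it, a single `Result` carries either the stream or the diagnostics.

```rust
// Illustrative stand-ins; the real rustc types and signatures are richer.
struct TokenStream;
struct Diag;
#[derive(Clone, Copy)]
enum Spacing {
    Alone,
}

// Before: the stream was returned alongside a separate error result, so the
// caller had to inspect both (plus lexer-side diagnostic state).
fn lex_token_trees_before() -> (Spacing, TokenStream, Result<(), Vec<Diag>>) {
    (Spacing::Alone, TokenStream, Ok(()))
}

// After: a single Result either yields the stream (with the opening token's
// spacing) or the accumulated diagnostics -- the "more typical form".
fn lex_token_trees_after() -> Result<(Spacing, TokenStream), Vec<Diag>> {
    Ok((Spacing::Alone, TokenStream))
}

fn main() {
    let (_spacing, _stream, res) = lex_token_trees_before();
    assert!(res.is_ok());
    assert!(lex_token_trees_after().is_ok());
}
```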
@@ -69,24 +69,30 @@ pub(crate) fn lex_token_trees<'psess, 'src>(
         token: Token::dummy(),
         diag_info: TokenTreeDiagInfo::default(),
     };
-    let (_open_spacing, stream, res) = lexer.lex_token_trees(/* is_delimited */ false);
-    let unmatched_delims = lexer.diag_info.unmatched_delims;
 
-    if res.is_ok() && unmatched_delims.is_empty() {
-        Ok(stream)
-    } else {
-        // Return error if there are unmatched delimiters or unclosed delimiters.
-        // We emit delimiter mismatch errors first, then emit the unclosing delimiter mismatch
-        // because the delimiter mismatch is more likely to be the root cause of error
-        let mut buffer: Vec<_> = unmatched_delims
-            .into_iter()
-            .filter_map(|unmatched_delim| make_unclosed_delims_error(unmatched_delim, psess))
-            .collect();
-        if let Err(errs) = res {
-            // Add unclosing delimiter or diff marker errors
-            buffer.extend(errs);
+    let res = lexer.lex_token_trees(/* is_delimited */ false);
+
+    let mut unmatched_delims: Vec<_> = lexer
+        .diag_info
+        .unmatched_delims
+        .into_iter()
+        .filter_map(|unmatched_delim| make_unclosed_delims_error(unmatched_delim, psess))
+        .collect();
+
+    match res {
+        Ok((_open_spacing, stream)) => {
+            if unmatched_delims.is_empty() {
+                Ok(stream)
+            } else {
+                // Return error if there are unmatched delimiters or unclosed delimiters.
+                Err(unmatched_delims)
+            }
+        }
+        Err(errs) => {
+            // We emit delimiter mismatch errors first, then emit the unclosing delimiter mismatch
+            // because the delimiter mismatch is more likely to be the root cause of error
+            unmatched_delims.extend(errs);
+            Err(unmatched_delims)
         }
-        Err(buffer)
     }
 }
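The new `match` preserves the error ordering the old comment describes: delimiter-mismatch errors gathered from the lexer's diagnostic state are reported before any errors returned by lexing itself, since the mismatch is the more likely root cause. A minimal sketch of that behaviour, with illustrative names that are not rustc's:

```rust
// Sketch of the error-ordering behaviour: unmatched-delimiter errors come
// first, lexing errors are appended afterwards.
fn finish(
    res: Result<Vec<&'static str>, Vec<String>>,
    mut unmatched_delims: Vec<String>,
) -> Result<Vec<&'static str>, Vec<String>> {
    match res {
        Ok(stream) if unmatched_delims.is_empty() => Ok(stream),
        Ok(_) => Err(unmatched_delims),
        Err(errs) => {
            // Append lexing errors after the delimiter-mismatch errors.
            unmatched_delims.extend(errs);
            Err(unmatched_delims)
        }
    }
}

fn main() {
    let out = finish(
        Err(vec!["this file contains an unclosed delimiter".to_string()]),
        vec!["mismatched closing delimiter: `)`".to_string()],
    );
    // The mismatch is listed first, then the unclosed-delimiter error.
    assert_eq!(out.unwrap_err().len(), 2);
}
```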