1
Fork 0

Crate-ify and delete unused code in syntax::parse

This commit is contained in:
Mark Simulacrum 2018-05-31 16:53:30 -06:00
parent 61d88318aa
commit 60058e5dbe
10 changed files with 191 additions and 365 deletions

View file

@ -25,6 +25,7 @@
#![feature(const_atomic_usize_new)] #![feature(const_atomic_usize_new)]
#![feature(rustc_attrs)] #![feature(rustc_attrs)]
#![feature(str_escape)] #![feature(str_escape)]
#![feature(crate_visibility_modifier)]
#![recursion_limit="256"] #![recursion_limit="256"]

View file

@ -11,8 +11,7 @@
use attr; use attr;
use ast; use ast;
use codemap::respan; use codemap::respan;
use parse::common::SeqSep; use parse::{SeqSep, PResult};
use parse::PResult;
use parse::token::{self, Nonterminal}; use parse::token::{self, Nonterminal};
use parse::parser::{Parser, TokenType, PathStyle}; use parse::parser::{Parser, TokenType, PathStyle};
use tokenstream::TokenStream; use tokenstream::TokenStream;
@ -28,7 +27,7 @@ const DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG: &'static str = "an inner attribute
impl<'a> Parser<'a> { impl<'a> Parser<'a> {
/// Parse attributes that appear before an item /// Parse attributes that appear before an item
pub fn parse_outer_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> { crate fn parse_outer_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
let mut attrs: Vec<ast::Attribute> = Vec::new(); let mut attrs: Vec<ast::Attribute> = Vec::new();
let mut just_parsed_doc_comment = false; let mut just_parsed_doc_comment = false;
loop { loop {
@ -139,7 +138,7 @@ impl<'a> Parser<'a> {
}) })
} }
pub fn parse_path_and_tokens(&mut self) -> PResult<'a, (ast::Path, TokenStream)> { crate fn parse_path_and_tokens(&mut self) -> PResult<'a, (ast::Path, TokenStream)> {
let meta = match self.token { let meta = match self.token {
token::Interpolated(ref nt) => match nt.0 { token::Interpolated(ref nt) => match nt.0 {
Nonterminal::NtMeta(ref meta) => Some(meta.clone()), Nonterminal::NtMeta(ref meta) => Some(meta.clone()),
@ -160,7 +159,7 @@ impl<'a> Parser<'a> {
/// terminated by a semicolon. /// terminated by a semicolon.
/// matches inner_attrs* /// matches inner_attrs*
pub fn parse_inner_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> { crate fn parse_inner_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
let mut attrs: Vec<ast::Attribute> = vec![]; let mut attrs: Vec<ast::Attribute> = vec![];
loop { loop {
match self.token { match self.token {
@ -231,7 +230,7 @@ impl<'a> Parser<'a> {
Ok(ast::MetaItem { ident, node, span }) Ok(ast::MetaItem { ident, node, span })
} }
pub fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> { crate fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
Ok(if self.eat(&token::Eq) { Ok(if self.eat(&token::Eq) {
ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?) ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?)
} else if self.eat(&token::OpenDelim(token::Paren)) { } else if self.eat(&token::OpenDelim(token::Paren)) {

View file

@ -1,36 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Common routines shared by parser mods
use parse::token;
/// `SeqSep` : a sequence separator (token)
/// and whether a trailing separator is allowed.
pub struct SeqSep {
pub sep: Option<token::Token>,
pub trailing_sep_allowed: bool,
}
impl SeqSep {
pub fn trailing_allowed(t: token::Token) -> SeqSep {
SeqSep {
sep: Some(t),
trailing_sep_allowed: true,
}
}
pub fn none() -> SeqSep {
SeqSep {
sep: None,
trailing_sep_allowed: false,
}
}
}

View file

@ -40,7 +40,7 @@ pub struct Comment {
pub pos: BytePos, pub pos: BytePos,
} }
pub fn is_doc_comment(s: &str) -> bool { fn is_doc_comment(s: &str) -> bool {
(s.starts_with("///") && super::is_doc_comment(s)) || s.starts_with("//!") || (s.starts_with("///") && super::is_doc_comment(s)) || s.starts_with("//!") ||
(s.starts_with("/**") && is_block_doc_comment(s)) || s.starts_with("/*!") (s.starts_with("/**") && is_block_doc_comment(s)) || s.starts_with("/*!")
} }

View file

@ -51,16 +51,16 @@ pub struct StringReader<'a> {
pub ch: Option<char>, pub ch: Option<char>,
pub filemap: Lrc<syntax_pos::FileMap>, pub filemap: Lrc<syntax_pos::FileMap>,
/// Stop reading src at this index. /// Stop reading src at this index.
pub end_src_index: usize, end_src_index: usize,
/// Whether to record new-lines and multibyte chars in filemap. /// Whether to record new-lines and multibyte chars in filemap.
/// This is only necessary the first time a filemap is lexed. /// This is only necessary the first time a filemap is lexed.
/// If part of a filemap is being re-lexed, this should be set to false. /// If part of a filemap is being re-lexed, this should be set to false.
pub save_new_lines_and_multibyte: bool, save_new_lines_and_multibyte: bool,
// cached: // cached:
peek_tok: token::Token, peek_tok: token::Token,
peek_span: Span, peek_span: Span,
peek_span_src_raw: Span, peek_span_src_raw: Span,
pub fatal_errs: Vec<DiagnosticBuilder<'a>>, fatal_errs: Vec<DiagnosticBuilder<'a>>,
// cache a direct reference to the source text, so that we don't have to // cache a direct reference to the source text, so that we don't have to
// retrieve it via `self.filemap.src.as_ref().unwrap()` all the time. // retrieve it via `self.filemap.src.as_ref().unwrap()` all the time.
src: Lrc<String>, src: Lrc<String>,
@ -70,7 +70,7 @@ pub struct StringReader<'a> {
/// The raw source span which *does not* take `override_span` into account /// The raw source span which *does not* take `override_span` into account
span_src_raw: Span, span_src_raw: Span,
open_braces: Vec<(token::DelimToken, Span)>, open_braces: Vec<(token::DelimToken, Span)>,
pub override_span: Option<Span>, crate override_span: Option<Span>,
} }
impl<'a> StringReader<'a> { impl<'a> StringReader<'a> {
@ -163,11 +163,9 @@ impl<'a> StringReader<'a> {
sp: self.peek_span, sp: self.peek_span,
} }
} }
}
impl<'a> StringReader<'a> {
/// For comments.rs, which hackily pokes into next_pos and ch /// For comments.rs, which hackily pokes into next_pos and ch
pub fn new_raw(sess: &'a ParseSess, filemap: Lrc<syntax_pos::FileMap>, fn new_raw(sess: &'a ParseSess, filemap: Lrc<syntax_pos::FileMap>,
override_span: Option<Span>) -> Self { override_span: Option<Span>) -> Self {
let mut sr = StringReader::new_raw_internal(sess, filemap, override_span); let mut sr = StringReader::new_raw_internal(sess, filemap, override_span);
sr.bump(); sr.bump();
@ -240,17 +238,17 @@ impl<'a> StringReader<'a> {
sr sr
} }
pub fn ch_is(&self, c: char) -> bool { fn ch_is(&self, c: char) -> bool {
self.ch == Some(c) self.ch == Some(c)
} }
/// Report a fatal lexical error with a given span. /// Report a fatal lexical error with a given span.
pub fn fatal_span(&self, sp: Span, m: &str) -> FatalError { fn fatal_span(&self, sp: Span, m: &str) -> FatalError {
self.sess.span_diagnostic.span_fatal(sp, m) self.sess.span_diagnostic.span_fatal(sp, m)
} }
/// Report a lexical error with a given span. /// Report a lexical error with a given span.
pub fn err_span(&self, sp: Span, m: &str) { fn err_span(&self, sp: Span, m: &str) {
self.sess.span_diagnostic.span_err(sp, m) self.sess.span_diagnostic.span_err(sp, m)
} }
@ -375,7 +373,7 @@ impl<'a> StringReader<'a> {
/// Calls `f` with a string slice of the source text spanning from `start` /// Calls `f` with a string slice of the source text spanning from `start`
/// up to but excluding `self.pos`, meaning the slice does not include /// up to but excluding `self.pos`, meaning the slice does not include
/// the character `self.ch`. /// the character `self.ch`.
pub fn with_str_from<T, F>(&self, start: BytePos, f: F) -> T fn with_str_from<T, F>(&self, start: BytePos, f: F) -> T
where F: FnOnce(&str) -> T where F: FnOnce(&str) -> T
{ {
self.with_str_from_to(start, self.pos, f) self.with_str_from_to(start, self.pos, f)
@ -384,13 +382,13 @@ impl<'a> StringReader<'a> {
/// Create a Name from a given offset to the current offset, each /// Create a Name from a given offset to the current offset, each
/// adjusted 1 towards each other (assumes that on either side there is a /// adjusted 1 towards each other (assumes that on either side there is a
/// single-byte delimiter). /// single-byte delimiter).
pub fn name_from(&self, start: BytePos) -> ast::Name { fn name_from(&self, start: BytePos) -> ast::Name {
debug!("taking an ident from {:?} to {:?}", start, self.pos); debug!("taking an ident from {:?} to {:?}", start, self.pos);
self.with_str_from(start, Symbol::intern) self.with_str_from(start, Symbol::intern)
} }
/// As name_from, with an explicit endpoint. /// As name_from, with an explicit endpoint.
pub fn name_from_to(&self, start: BytePos, end: BytePos) -> ast::Name { fn name_from_to(&self, start: BytePos, end: BytePos) -> ast::Name {
debug!("taking an ident from {:?} to {:?}", start, end); debug!("taking an ident from {:?} to {:?}", start, end);
self.with_str_from_to(start, end, Symbol::intern) self.with_str_from_to(start, end, Symbol::intern)
} }
@ -454,7 +452,7 @@ impl<'a> StringReader<'a> {
/// Advance the StringReader by one character. If a newline is /// Advance the StringReader by one character. If a newline is
/// discovered, add it to the FileMap's list of line start offsets. /// discovered, add it to the FileMap's list of line start offsets.
pub fn bump(&mut self) { crate fn bump(&mut self) {
let next_src_index = self.src_index(self.next_pos); let next_src_index = self.src_index(self.next_pos);
if next_src_index < self.end_src_index { if next_src_index < self.end_src_index {
let next_ch = char_at(&self.src, next_src_index); let next_ch = char_at(&self.src, next_src_index);
@ -481,7 +479,7 @@ impl<'a> StringReader<'a> {
} }
} }
pub fn nextch(&self) -> Option<char> { fn nextch(&self) -> Option<char> {
let next_src_index = self.src_index(self.next_pos); let next_src_index = self.src_index(self.next_pos);
if next_src_index < self.end_src_index { if next_src_index < self.end_src_index {
Some(char_at(&self.src, next_src_index)) Some(char_at(&self.src, next_src_index))
@ -490,11 +488,11 @@ impl<'a> StringReader<'a> {
} }
} }
pub fn nextch_is(&self, c: char) -> bool { fn nextch_is(&self, c: char) -> bool {
self.nextch() == Some(c) self.nextch() == Some(c)
} }
pub fn nextnextch(&self) -> Option<char> { fn nextnextch(&self) -> Option<char> {
let next_src_index = self.src_index(self.next_pos); let next_src_index = self.src_index(self.next_pos);
if next_src_index < self.end_src_index { if next_src_index < self.end_src_index {
let next_next_src_index = let next_next_src_index =
@ -506,7 +504,7 @@ impl<'a> StringReader<'a> {
None None
} }
pub fn nextnextch_is(&self, c: char) -> bool { fn nextnextch_is(&self, c: char) -> bool {
self.nextnextch() == Some(c) self.nextnextch() == Some(c)
} }
@ -1732,7 +1730,7 @@ impl<'a> StringReader<'a> {
// This tests the character for the unicode property 'PATTERN_WHITE_SPACE' which // This tests the character for the unicode property 'PATTERN_WHITE_SPACE' which
// is guaranteed to be forward compatible. http://unicode.org/reports/tr31/#R3 // is guaranteed to be forward compatible. http://unicode.org/reports/tr31/#R3
pub fn is_pattern_whitespace(c: Option<char>) -> bool { crate fn is_pattern_whitespace(c: Option<char>) -> bool {
c.map_or(false, Pattern_White_Space) c.map_or(false, Pattern_White_Space)
} }
@ -1747,14 +1745,14 @@ fn is_dec_digit(c: Option<char>) -> bool {
in_range(c, '0', '9') in_range(c, '0', '9')
} }
pub fn is_doc_comment(s: &str) -> bool { fn is_doc_comment(s: &str) -> bool {
let res = (s.starts_with("///") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'/') || let res = (s.starts_with("///") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'/') ||
s.starts_with("//!"); s.starts_with("//!");
debug!("is {:?} a doc comment? {}", s, res); debug!("is {:?} a doc comment? {}", s, res);
res res
} }
pub fn is_block_doc_comment(s: &str) -> bool { fn is_block_doc_comment(s: &str) -> bool {
// Prevent `/**/` from being parsed as a doc comment // Prevent `/**/` from being parsed as a doc comment
let res = ((s.starts_with("/**") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'*') || let res = ((s.starts_with("/**") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'*') ||
s.starts_with("/*!")) && s.len() >= 5; s.starts_with("/*!")) && s.len() >= 5;

View file

@ -15,7 +15,7 @@ use tokenstream::{Delimited, TokenStream, TokenTree};
impl<'a> StringReader<'a> { impl<'a> StringReader<'a> {
// Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`. // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
pub fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> { crate fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> {
let mut tts = Vec::new(); let mut tts = Vec::new();
while self.token != token::Eof { while self.token != token::Eof {
tts.push(self.parse_token_tree()?); tts.push(self.parse_token_tree()?);

View file

@ -333,7 +333,7 @@ const ASCII_ARRAY: &'static [(char, &'static str)] = &[
('=', "Equals Sign"), ('=', "Equals Sign"),
('>', "Greater-Than Sign"), ]; ('>', "Greater-Than Sign"), ];
pub fn check_for_substitution<'a>(reader: &StringReader<'a>, crate fn check_for_substitution<'a>(reader: &StringReader<'a>,
ch: char, ch: char,
err: &mut DiagnosticBuilder<'a>) -> bool { err: &mut DiagnosticBuilder<'a>) -> bool {
UNICODE_ARRAY UNICODE_ARRAY

View file

@ -38,7 +38,6 @@ pub mod lexer;
pub mod token; pub mod token;
pub mod attr; pub mod attr;
pub mod common;
pub mod classify; pub mod classify;
/// Info about a parsing session. /// Info about a parsing session.
@ -51,7 +50,7 @@ pub struct ParseSess {
/// raw identifiers /// raw identifiers
pub raw_identifier_spans: Lock<Vec<Span>>, pub raw_identifier_spans: Lock<Vec<Span>>,
/// The registered diagnostics codes /// The registered diagnostics codes
pub registered_diagnostics: Lock<ErrorMap>, crate registered_diagnostics: Lock<ErrorMap>,
// Spans where a `mod foo;` statement was included in a non-mod.rs file. // Spans where a `mod foo;` statement was included in a non-mod.rs file.
// These are used to issue errors if the non_modrs_mods feature is not enabled. // These are used to issue errors if the non_modrs_mods feature is not enabled.
pub non_modrs_mods: Lock<Vec<(ast::Ident, Span)>>, pub non_modrs_mods: Lock<Vec<(ast::Ident, Span)>>,
@ -131,7 +130,7 @@ pub fn parse_crate_attrs_from_source_str(name: FileName, source: String, sess: &
new_parser_from_source_str(sess, name, source).parse_inner_attributes() new_parser_from_source_str(sess, name, source).parse_inner_attributes()
} }
pub fn parse_expr_from_source_str(name: FileName, source: String, sess: &ParseSess) crate fn parse_expr_from_source_str(name: FileName, source: String, sess: &ParseSess)
-> PResult<P<ast::Expr>> { -> PResult<P<ast::Expr>> {
new_parser_from_source_str(sess, name, source).parse_expr() new_parser_from_source_str(sess, name, source).parse_expr()
} }
@ -140,17 +139,12 @@ pub fn parse_expr_from_source_str(name: FileName, source: String, sess: &ParseSe
/// ///
/// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and `Err` /// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and `Err`
/// when a syntax error occurred. /// when a syntax error occurred.
pub fn parse_item_from_source_str(name: FileName, source: String, sess: &ParseSess) crate fn parse_item_from_source_str(name: FileName, source: String, sess: &ParseSess)
-> PResult<Option<P<ast::Item>>> { -> PResult<Option<P<ast::Item>>> {
new_parser_from_source_str(sess, name, source).parse_item() new_parser_from_source_str(sess, name, source).parse_item()
} }
pub fn parse_meta_from_source_str(name: FileName, source: String, sess: &ParseSess) crate fn parse_stmt_from_source_str(name: FileName, source: String, sess: &ParseSess)
-> PResult<ast::MetaItem> {
new_parser_from_source_str(sess, name, source).parse_meta_item()
}
pub fn parse_stmt_from_source_str(name: FileName, source: String, sess: &ParseSess)
-> PResult<Option<ast::Stmt>> { -> PResult<Option<ast::Stmt>> {
new_parser_from_source_str(sess, name, source).parse_stmt() new_parser_from_source_str(sess, name, source).parse_stmt()
} }
@ -178,7 +172,7 @@ pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path) -> Parser<'a>
/// Given a session, a crate config, a path, and a span, add /// Given a session, a crate config, a path, and a span, add
/// the file at the given path to the codemap, and return a parser. /// the file at the given path to the codemap, and return a parser.
/// On an error, use the given span as the source of the problem. /// On an error, use the given span as the source of the problem.
pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess, crate fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
path: &Path, path: &Path,
directory_ownership: DirectoryOwnership, directory_ownership: DirectoryOwnership,
module_name: Option<String>, module_name: Option<String>,
@ -190,7 +184,7 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
} }
/// Given a filemap and config, return a parser /// Given a filemap and config, return a parser
pub fn filemap_to_parser(sess: & ParseSess, filemap: Lrc<FileMap>) -> Parser { fn filemap_to_parser(sess: & ParseSess, filemap: Lrc<FileMap>) -> Parser {
let end_pos = filemap.end_pos; let end_pos = filemap.end_pos;
let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap, None)); let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap, None));
@ -243,7 +237,7 @@ pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser {
/// Rather than just accepting/rejecting a given literal, unescapes it as /// Rather than just accepting/rejecting a given literal, unescapes it as
/// well. Can take any slice prefixed by a character escape. Returns the /// well. Can take any slice prefixed by a character escape. Returns the
/// character and the number of characters consumed. /// character and the number of characters consumed.
pub fn char_lit(lit: &str, diag: Option<(Span, &Handler)>) -> (char, isize) { fn char_lit(lit: &str, diag: Option<(Span, &Handler)>) -> (char, isize) {
use std::char; use std::char;
// Handle non-escaped chars first. // Handle non-escaped chars first.
@ -300,7 +294,7 @@ pub fn char_lit(lit: &str, diag: Option<(Span, &Handler)>) -> (char, isize) {
/// Parse a string representing a string literal into its final form. Does /// Parse a string representing a string literal into its final form. Does
/// unescaping. /// unescaping.
pub fn str_lit(lit: &str, diag: Option<(Span, &Handler)>) -> String { fn str_lit(lit: &str, diag: Option<(Span, &Handler)>) -> String {
debug!("str_lit: given {}", lit.escape_default()); debug!("str_lit: given {}", lit.escape_default());
let mut res = String::with_capacity(lit.len()); let mut res = String::with_capacity(lit.len());
@ -369,7 +363,7 @@ pub fn str_lit(lit: &str, diag: Option<(Span, &Handler)>) -> String {
/// Parse a string representing a raw string literal into its final form. The /// Parse a string representing a raw string literal into its final form. The
/// only operation this does is convert embedded CRLF into a single LF. /// only operation this does is convert embedded CRLF into a single LF.
pub fn raw_str_lit(lit: &str) -> String { fn raw_str_lit(lit: &str) -> String {
debug!("raw_str_lit: given {}", lit.escape_default()); debug!("raw_str_lit: given {}", lit.escape_default());
let mut res = String::with_capacity(lit.len()); let mut res = String::with_capacity(lit.len());
@ -406,7 +400,7 @@ macro_rules! err {
} }
} }
pub fn lit_token(lit: token::Lit, suf: Option<Symbol>, diag: Option<(Span, &Handler)>) crate fn lit_token(lit: token::Lit, suf: Option<Symbol>, diag: Option<(Span, &Handler)>)
-> (bool /* suffix illegal? */, Option<ast::LitKind>) { -> (bool /* suffix illegal? */, Option<ast::LitKind>) {
use ast::LitKind; use ast::LitKind;
@ -476,7 +470,7 @@ fn filtered_float_lit(data: Symbol, suffix: Option<Symbol>, diag: Option<(Span,
} }
}) })
} }
pub fn float_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>) fn float_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
-> Option<ast::LitKind> { -> Option<ast::LitKind> {
debug!("float_lit: {:?}, {:?}", s, suffix); debug!("float_lit: {:?}, {:?}", s, suffix);
// FIXME #2252: bounds checking float literals is deferred until trans // FIXME #2252: bounds checking float literals is deferred until trans
@ -485,7 +479,7 @@ pub fn float_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>
} }
/// Parse a string representing a byte literal into its final form. Similar to `char_lit` /// Parse a string representing a byte literal into its final form. Similar to `char_lit`
pub fn byte_lit(lit: &str) -> (u8, usize) { fn byte_lit(lit: &str) -> (u8, usize) {
let err = |i| format!("lexer accepted invalid byte literal {} step {}", lit, i); let err = |i| format!("lexer accepted invalid byte literal {} step {}", lit, i);
if lit.len() == 1 { if lit.len() == 1 {
@ -516,7 +510,7 @@ pub fn byte_lit(lit: &str) -> (u8, usize) {
} }
} }
pub fn byte_str_lit(lit: &str) -> Lrc<Vec<u8>> { fn byte_str_lit(lit: &str) -> Lrc<Vec<u8>> {
let mut res = Vec::with_capacity(lit.len()); let mut res = Vec::with_capacity(lit.len());
let error = |i| format!("lexer should have rejected {} at {}", lit, i); let error = |i| format!("lexer should have rejected {} at {}", lit, i);
@ -575,7 +569,7 @@ pub fn byte_str_lit(lit: &str) -> Lrc<Vec<u8>> {
Lrc::new(res) Lrc::new(res)
} }
pub fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>) fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
-> Option<ast::LitKind> { -> Option<ast::LitKind> {
// s can only be ascii, byte indexing is fine // s can only be ascii, byte indexing is fine
@ -1136,3 +1130,26 @@ mod tests {
}); });
} }
} }
/// `SeqSep` : a sequence separator (token)
/// and whether a trailing separator is allowed.
pub struct SeqSep {
pub sep: Option<token::Token>,
pub trailing_sep_allowed: bool,
}
impl SeqSep {
pub fn trailing_allowed(t: token::Token) -> SeqSep {
SeqSep {
sep: Some(t),
trailing_sep_allowed: true,
}
}
pub fn none() -> SeqSep {
SeqSep {
sep: None,
trailing_sep_allowed: false,
}
}
}

View file

@ -23,7 +23,7 @@ use ast::{Field, FnDecl};
use ast::{ForeignItem, ForeignItemKind, FunctionRetTy}; use ast::{ForeignItem, ForeignItemKind, FunctionRetTy};
use ast::GenericParam; use ast::GenericParam;
use ast::{Ident, ImplItem, IsAuto, Item, ItemKind}; use ast::{Ident, ImplItem, IsAuto, Item, ItemKind};
use ast::{Label, Lifetime, LifetimeDef, Lit, LitKind, UintTy}; use ast::{Label, Lifetime, LifetimeDef, Lit, LitKind};
use ast::Local; use ast::Local;
use ast::MacStmtStyle; use ast::MacStmtStyle;
use ast::{Mac, Mac_, MacDelimiter}; use ast::{Mac, Mac_, MacDelimiter};
@ -44,8 +44,7 @@ use {ast, attr};
use codemap::{self, CodeMap, Spanned, respan}; use codemap::{self, CodeMap, Spanned, respan};
use syntax_pos::{self, Span, MultiSpan, BytePos, FileName, DUMMY_SP}; use syntax_pos::{self, Span, MultiSpan, BytePos, FileName, DUMMY_SP};
use errors::{self, Applicability, DiagnosticBuilder}; use errors::{self, Applicability, DiagnosticBuilder};
use parse::{self, classify, token}; use parse::{self, SeqSep, classify, token};
use parse::common::SeqSep;
use parse::lexer::TokenAndSpan; use parse::lexer::TokenAndSpan;
use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
use parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership}; use parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
@ -64,7 +63,7 @@ use std::path::{self, Path, PathBuf};
use std::slice; use std::slice;
bitflags! { bitflags! {
pub struct Restrictions: u8 { struct Restrictions: u8 {
const STMT_EXPR = 1 << 0; const STMT_EXPR = 1 << 0;
const NO_STRUCT_LITERAL = 1 << 1; const NO_STRUCT_LITERAL = 1 << 1;
} }
@ -96,13 +95,13 @@ pub enum PathStyle {
} }
#[derive(Clone, Copy, Debug, PartialEq)] #[derive(Clone, Copy, Debug, PartialEq)]
pub enum SemiColonMode { enum SemiColonMode {
Break, Break,
Ignore, Ignore,
} }
#[derive(Clone, Copy, Debug, PartialEq)] #[derive(Clone, Copy, Debug, PartialEq)]
pub enum BlockMode { enum BlockMode {
Break, Break,
Ignore, Ignore,
} }
@ -223,22 +222,22 @@ pub struct Parser<'a> {
/// the span of the current token: /// the span of the current token:
pub span: Span, pub span: Span,
/// the span of the previous token: /// the span of the previous token:
pub meta_var_span: Option<Span>, meta_var_span: Option<Span>,
pub prev_span: Span, pub prev_span: Span,
/// the previous token kind /// the previous token kind
prev_token_kind: PrevTokenKind, prev_token_kind: PrevTokenKind,
pub restrictions: Restrictions, restrictions: Restrictions,
/// Used to determine the path to externally loaded source files /// Used to determine the path to externally loaded source files
pub directory: Directory<'a>, crate directory: Directory<'a>,
/// Whether to parse sub-modules in other files. /// Whether to parse sub-modules in other files.
pub recurse_into_file_modules: bool, pub recurse_into_file_modules: bool,
/// Name of the root module this parser originated from. If `None`, then the /// Name of the root module this parser originated from. If `None`, then the
/// name is not known. This does not change while the parser is descending /// name is not known. This does not change while the parser is descending
/// into modules, and sub-parsers have new values for this name. /// into modules, and sub-parsers have new values for this name.
pub root_module_name: Option<String>, pub root_module_name: Option<String>,
pub expected_tokens: Vec<TokenType>, crate expected_tokens: Vec<TokenType>,
token_cursor: TokenCursor, token_cursor: TokenCursor,
pub desugar_doc_comments: bool, desugar_doc_comments: bool,
/// Whether we should configure out of line modules as we parse. /// Whether we should configure out of line modules as we parse.
pub cfg_mods: bool, pub cfg_mods: bool,
} }
@ -377,7 +376,7 @@ impl TokenCursor {
} }
#[derive(PartialEq, Eq, Clone)] #[derive(PartialEq, Eq, Clone)]
pub enum TokenType { crate enum TokenType {
Token(token::Token), Token(token::Token),
Keyword(keywords::Keyword), Keyword(keywords::Keyword),
Operator, Operator,
@ -390,7 +389,7 @@ pub enum TokenType {
impl TokenType { impl TokenType {
fn to_string(&self) -> String { fn to_string(&self) -> String {
match *self { match *self {
TokenType::Token(ref t) => format!("`{}`", Parser::token_to_string(t)), TokenType::Token(ref t) => format!("`{}`", pprust::token_to_string(t)),
TokenType::Keyword(kw) => format!("`{}`", kw.name()), TokenType::Keyword(kw) => format!("`{}`", kw.name()),
TokenType::Operator => "an operator".to_string(), TokenType::Operator => "an operator".to_string(),
TokenType::Lifetime => "lifetime".to_string(), TokenType::Lifetime => "lifetime".to_string(),
@ -413,8 +412,8 @@ fn can_continue_type_after_non_fn_ident(t: &token::Token) -> bool {
/// Information about the path to a module. /// Information about the path to a module.
pub struct ModulePath { pub struct ModulePath {
pub name: String, name: String,
pub path_exists: bool, path_exists: bool,
pub result: Result<ModulePathSuccess, Error>, pub result: Result<ModulePathSuccess, Error>,
} }
@ -424,11 +423,6 @@ pub struct ModulePathSuccess {
warn: bool, warn: bool,
} }
pub struct ModulePathError {
pub err_msg: String,
pub help_msg: String,
}
pub enum Error { pub enum Error {
FileNotFoundForModule { FileNotFoundForModule {
mod_name: String, mod_name: String,
@ -446,7 +440,7 @@ pub enum Error {
} }
impl Error { impl Error {
pub fn span_err<S: Into<MultiSpan>>(self, fn span_err<S: Into<MultiSpan>>(self,
sp: S, sp: S,
handler: &errors::Handler) -> DiagnosticBuilder { handler: &errors::Handler) -> DiagnosticBuilder {
match self { match self {
@ -489,7 +483,7 @@ impl Error {
} }
#[derive(Debug)] #[derive(Debug)]
pub enum LhsExpr { enum LhsExpr {
NotYetParsed, NotYetParsed,
AttributesParsed(ThinVec<Attribute>), AttributesParsed(ThinVec<Attribute>),
AlreadyParsed(P<Expr>), AlreadyParsed(P<Expr>),
@ -596,17 +590,12 @@ impl<'a> Parser<'a> {
next next
} }
/// Convert a token to a string using self's reader
pub fn token_to_string(token: &token::Token) -> String {
pprust::token_to_string(token)
}
/// Convert the current token to a string using self's reader /// Convert the current token to a string using self's reader
pub fn this_token_to_string(&self) -> String { pub fn this_token_to_string(&self) -> String {
Parser::token_to_string(&self.token) pprust::token_to_string(&self.token)
} }
pub fn token_descr(&self) -> Option<&'static str> { fn token_descr(&self) -> Option<&'static str> {
Some(match &self.token { Some(match &self.token {
t if t.is_special_ident() => "reserved identifier", t if t.is_special_ident() => "reserved identifier",
t if t.is_used_keyword() => "keyword", t if t.is_used_keyword() => "keyword",
@ -615,7 +604,7 @@ impl<'a> Parser<'a> {
}) })
} }
pub fn this_token_descr(&self) -> String { fn this_token_descr(&self) -> String {
if let Some(prefix) = self.token_descr() { if let Some(prefix) = self.token_descr() {
format!("{} `{}`", prefix, self.this_token_to_string()) format!("{} `{}`", prefix, self.this_token_to_string())
} else { } else {
@ -623,12 +612,12 @@ impl<'a> Parser<'a> {
} }
} }
pub fn unexpected_last<T>(&self, t: &token::Token) -> PResult<'a, T> { fn unexpected_last<T>(&self, t: &token::Token) -> PResult<'a, T> {
let token_str = Parser::token_to_string(t); let token_str = pprust::token_to_string(t);
Err(self.span_fatal(self.prev_span, &format!("unexpected token: `{}`", token_str))) Err(self.span_fatal(self.prev_span, &format!("unexpected token: `{}`", token_str)))
} }
pub fn unexpected<T>(&mut self) -> PResult<'a, T> { crate fn unexpected<T>(&mut self) -> PResult<'a, T> {
match self.expect_one_of(&[], &[]) { match self.expect_one_of(&[], &[]) {
Err(e) => Err(e), Err(e) => Err(e),
Ok(_) => unreachable!(), Ok(_) => unreachable!(),
@ -643,7 +632,7 @@ impl<'a> Parser<'a> {
self.bump(); self.bump();
Ok(()) Ok(())
} else { } else {
let token_str = Parser::token_to_string(t); let token_str = pprust::token_to_string(t);
let this_token_str = self.this_token_to_string(); let this_token_str = self.this_token_to_string();
let mut err = self.fatal(&format!("expected `{}`, found `{}`", let mut err = self.fatal(&format!("expected `{}`, found `{}`",
token_str, token_str,
@ -659,7 +648,7 @@ impl<'a> Parser<'a> {
/// Expect next token to be edible or inedible token. If edible, /// Expect next token to be edible or inedible token. If edible,
/// then consume it; if inedible, then return without consuming /// then consume it; if inedible, then return without consuming
/// anything. Signal a fatal error if next token is unexpected. /// anything. Signal a fatal error if next token is unexpected.
pub fn expect_one_of(&mut self, fn expect_one_of(&mut self,
edible: &[token::Token], edible: &[token::Token],
inedible: &[token::Token]) -> PResult<'a, ()>{ inedible: &[token::Token]) -> PResult<'a, ()>{
fn tokens_to_string(tokens: &[TokenType]) -> String { fn tokens_to_string(tokens: &[TokenType]) -> String {
@ -771,7 +760,7 @@ impl<'a> Parser<'a> {
err err
} }
pub fn parse_ident(&mut self) -> PResult<'a, ast::Ident> { fn parse_ident(&mut self) -> PResult<'a, ast::Ident> {
self.parse_ident_common(true) self.parse_ident_common(true)
} }
@ -804,7 +793,7 @@ impl<'a> Parser<'a> {
/// ///
/// This method will automatically add `tok` to `expected_tokens` if `tok` is not /// This method will automatically add `tok` to `expected_tokens` if `tok` is not
/// encountered. /// encountered.
pub fn check(&mut self, tok: &token::Token) -> bool { fn check(&mut self, tok: &token::Token) -> bool {
let is_present = self.token == *tok; let is_present = self.token == *tok;
if !is_present { self.expected_tokens.push(TokenType::Token(tok.clone())); } if !is_present { self.expected_tokens.push(TokenType::Token(tok.clone())); }
is_present is_present
@ -818,7 +807,7 @@ impl<'a> Parser<'a> {
is_present is_present
} }
pub fn check_keyword(&mut self, kw: keywords::Keyword) -> bool { fn check_keyword(&mut self, kw: keywords::Keyword) -> bool {
self.expected_tokens.push(TokenType::Keyword(kw)); self.expected_tokens.push(TokenType::Keyword(kw));
self.token.is_keyword(kw) self.token.is_keyword(kw)
} }
@ -834,7 +823,7 @@ impl<'a> Parser<'a> {
} }
} }
pub fn eat_keyword_noexpect(&mut self, kw: keywords::Keyword) -> bool { fn eat_keyword_noexpect(&mut self, kw: keywords::Keyword) -> bool {
if self.token.is_keyword(kw) { if self.token.is_keyword(kw) {
self.bump(); self.bump();
true true
@ -846,7 +835,7 @@ impl<'a> Parser<'a> {
/// If the given word is not a keyword, signal an error. /// If the given word is not a keyword, signal an error.
/// If the next token is not the given word, signal an error. /// If the next token is not the given word, signal an error.
/// Otherwise, eat it. /// Otherwise, eat it.
pub fn expect_keyword(&mut self, kw: keywords::Keyword) -> PResult<'a, ()> { fn expect_keyword(&mut self, kw: keywords::Keyword) -> PResult<'a, ()> {
if !self.eat_keyword(kw) { if !self.eat_keyword(kw) {
self.unexpected() self.unexpected()
} else { } else {
@ -949,7 +938,7 @@ impl<'a> Parser<'a> {
} }
} }
pub fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) { fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) {
match suffix { match suffix {
None => {/* everything ok */} None => {/* everything ok */}
Some(suf) => { Some(suf) => {
@ -994,7 +983,7 @@ impl<'a> Parser<'a> {
/// Expect and consume a GT. if a >> is seen, replace it /// Expect and consume a GT. if a >> is seen, replace it
/// with a single > and continue. If a GT is not seen, /// with a single > and continue. If a GT is not seen,
/// signal an error. /// signal an error.
pub fn expect_gt(&mut self) -> PResult<'a, ()> { fn expect_gt(&mut self) -> PResult<'a, ()> {
self.expected_tokens.push(TokenType::Token(token::Gt)); self.expected_tokens.push(TokenType::Token(token::Gt));
match self.token { match self.token {
token::Gt => { token::Gt => {
@ -1017,83 +1006,9 @@ impl<'a> Parser<'a> {
} }
} }
pub fn parse_seq_to_before_gt_or_return<T, F>(&mut self,
sep: Option<token::Token>,
mut f: F)
-> PResult<'a, (Vec<T>, bool)>
where F: FnMut(&mut Parser<'a>) -> PResult<'a, Option<T>>,
{
let mut v = Vec::new();
// This loop works by alternating back and forth between parsing types
// and commas. For example, given a string `A, B,>`, the parser would
// first parse `A`, then a comma, then `B`, then a comma. After that it
// would encounter a `>` and stop. This lets the parser handle trailing
// commas in generic parameters, because it can stop either after
// parsing a type or after parsing a comma.
for i in 0.. {
if self.check(&token::Gt)
|| self.token == token::BinOp(token::Shr)
|| self.token == token::Ge
|| self.token == token::BinOpEq(token::Shr) {
break;
}
if i % 2 == 0 {
match f(self)? {
Some(result) => v.push(result),
None => return Ok((v, true))
}
} else {
if let Some(t) = sep.as_ref() {
self.expect(t)?;
}
}
}
return Ok((v, false));
}
/// Parse a sequence bracketed by '<' and '>', stopping
/// before the '>'.
pub fn parse_seq_to_before_gt<T, F>(&mut self,
sep: Option<token::Token>,
mut f: F)
-> PResult<'a, Vec<T>> where
F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
{
let (result, returned) = self.parse_seq_to_before_gt_or_return(sep,
|p| Ok(Some(f(p)?)))?;
assert!(!returned);
return Ok(result);
}
pub fn parse_seq_to_gt<T, F>(&mut self,
sep: Option<token::Token>,
f: F)
-> PResult<'a, Vec<T>> where
F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
{
let v = self.parse_seq_to_before_gt(sep, f)?;
self.expect_gt()?;
return Ok(v);
}
pub fn parse_seq_to_gt_or_return<T, F>(&mut self,
sep: Option<token::Token>,
f: F)
-> PResult<'a, (Vec<T>, bool)> where
F: FnMut(&mut Parser<'a>) -> PResult<'a, Option<T>>,
{
let (v, returned) = self.parse_seq_to_before_gt_or_return(sep, f)?;
if !returned {
self.expect_gt()?;
}
return Ok((v, returned));
}
/// Eat and discard tokens until one of `kets` is encountered. Respects token trees, /// Eat and discard tokens until one of `kets` is encountered. Respects token trees,
/// passes through any errors encountered. Used for error recovery. /// passes through any errors encountered. Used for error recovery.
pub fn eat_to_tokens(&mut self, kets: &[&token::Token]) { fn eat_to_tokens(&mut self, kets: &[&token::Token]) {
let handler = self.diagnostic(); let handler = self.diagnostic();
if let Err(ref mut err) = self.parse_seq_to_before_tokens(kets, if let Err(ref mut err) = self.parse_seq_to_before_tokens(kets,
@ -1107,7 +1022,7 @@ impl<'a> Parser<'a> {
/// Parse a sequence, including the closing delimiter. The function /// Parse a sequence, including the closing delimiter. The function
/// f must consume tokens until reaching the next separator or /// f must consume tokens until reaching the next separator or
/// closing bracket. /// closing bracket.
pub fn parse_seq_to_end<T, F>(&mut self, crate fn parse_seq_to_end<T, F>(&mut self,
ket: &token::Token, ket: &token::Token,
sep: SeqSep, sep: SeqSep,
f: F) f: F)
@ -1122,7 +1037,7 @@ impl<'a> Parser<'a> {
/// Parse a sequence, not including the closing delimiter. The function /// Parse a sequence, not including the closing delimiter. The function
/// f must consume tokens until reaching the next separator or /// f must consume tokens until reaching the next separator or
/// closing bracket. /// closing bracket.
pub fn parse_seq_to_before_end<T, F>(&mut self, fn parse_seq_to_before_end<T, F>(&mut self,
ket: &token::Token, ket: &token::Token,
sep: SeqSep, sep: SeqSep,
f: F) f: F)
@ -1197,7 +1112,7 @@ impl<'a> Parser<'a> {
/// Parse a sequence, including the closing delimiter. The function /// Parse a sequence, including the closing delimiter. The function
/// f must consume tokens until reaching the next separator or /// f must consume tokens until reaching the next separator or
/// closing bracket. /// closing bracket.
pub fn parse_unspanned_seq<T, F>(&mut self, fn parse_unspanned_seq<T, F>(&mut self,
bra: &token::Token, bra: &token::Token,
ket: &token::Token, ket: &token::Token,
sep: SeqSep, sep: SeqSep,
@ -1213,24 +1128,6 @@ impl<'a> Parser<'a> {
Ok(result) Ok(result)
} }
// NB: Do not use this function unless you actually plan to place the
// spanned list in the AST.
pub fn parse_seq<T, F>(&mut self,
bra: &token::Token,
ket: &token::Token,
sep: SeqSep,
f: F)
-> PResult<'a, Spanned<Vec<T>>> where
F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
{
let lo = self.span;
self.expect(bra)?;
let result = self.parse_seq_to_before_end(ket, sep, f)?;
let hi = self.span;
self.bump();
Ok(respan(lo.to(hi), result))
}
/// Advance the parser by one token /// Advance the parser by one token
pub fn bump(&mut self) { pub fn bump(&mut self) {
if self.prev_token_kind == PrevTokenKind::Eof { if self.prev_token_kind == PrevTokenKind::Eof {
@ -1261,7 +1158,7 @@ impl<'a> Parser<'a> {
/// Advance the parser using provided token as a next one. Use this when /// Advance the parser using provided token as a next one. Use this when
/// consuming a part of a token. For example a single `<` from `<<`. /// consuming a part of a token. For example a single `<` from `<<`.
pub fn bump_with(&mut self, next: token::Token, span: Span) { fn bump_with(&mut self, next: token::Token, span: Span) {
self.prev_span = self.span.with_hi(span.lo()); self.prev_span = self.span.with_hi(span.lo());
// It would be incorrect to record the kind of the current token, but // It would be incorrect to record the kind of the current token, but
// fortunately for tokens currently using `bump_with`, the // fortunately for tokens currently using `bump_with`, the
@ -1301,13 +1198,13 @@ impl<'a> Parser<'a> {
pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> { pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> {
self.sess.span_diagnostic.struct_span_fatal(self.span, m) self.sess.span_diagnostic.struct_span_fatal(self.span, m)
} }
pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
self.sess.span_diagnostic.struct_span_fatal(sp, m) self.sess.span_diagnostic.struct_span_fatal(sp, m)
} }
pub fn span_fatal_err<S: Into<MultiSpan>>(&self, sp: S, err: Error) -> DiagnosticBuilder<'a> { fn span_fatal_err<S: Into<MultiSpan>>(&self, sp: S, err: Error) -> DiagnosticBuilder<'a> {
err.span_err(sp, self.diagnostic()) err.span_err(sp, self.diagnostic())
} }
pub fn span_fatal_help<S: Into<MultiSpan>>(&self, fn span_fatal_help<S: Into<MultiSpan>>(&self,
sp: S, sp: S,
m: &str, m: &str,
help: &str) -> DiagnosticBuilder<'a> { help: &str) -> DiagnosticBuilder<'a> {
@ -1315,30 +1212,19 @@ impl<'a> Parser<'a> {
err.help(help); err.help(help);
err err
} }
pub fn bug(&self, m: &str) -> ! { fn bug(&self, m: &str) -> ! {
self.sess.span_diagnostic.span_bug(self.span, m) self.sess.span_diagnostic.span_bug(self.span, m)
} }
pub fn warn(&self, m: &str) { fn span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) {
self.sess.span_diagnostic.span_warn(self.span, m)
}
pub fn span_warn<S: Into<MultiSpan>>(&self, sp: S, m: &str) {
self.sess.span_diagnostic.span_warn(sp, m)
}
pub fn span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) {
self.sess.span_diagnostic.span_err(sp, m) self.sess.span_diagnostic.span_err(sp, m)
} }
pub fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
self.sess.span_diagnostic.struct_span_err(sp, m) self.sess.span_diagnostic.struct_span_err(sp, m)
} }
pub fn span_err_help<S: Into<MultiSpan>>(&self, sp: S, m: &str, h: &str) { crate fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! {
let mut err = self.sess.span_diagnostic.mut_span_err(sp, m);
err.help(h);
err.emit();
}
pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! {
self.sess.span_diagnostic.span_bug(sp, m) self.sess.span_diagnostic.span_bug(sp, m)
} }
pub fn abort_if_errors(&self) { crate fn abort_if_errors(&self) {
self.sess.span_diagnostic.abort_if_errors(); self.sess.span_diagnostic.abort_if_errors();
} }
@ -1346,20 +1232,20 @@ impl<'a> Parser<'a> {
self.sess.span_diagnostic.cancel(err) self.sess.span_diagnostic.cancel(err)
} }
pub fn diagnostic(&self) -> &'a errors::Handler { crate fn diagnostic(&self) -> &'a errors::Handler {
&self.sess.span_diagnostic &self.sess.span_diagnostic
} }
/// Is the current token one of the keywords that signals a bare function /// Is the current token one of the keywords that signals a bare function
/// type? /// type?
pub fn token_is_bare_fn_keyword(&mut self) -> bool { fn token_is_bare_fn_keyword(&mut self) -> bool {
self.check_keyword(keywords::Fn) || self.check_keyword(keywords::Fn) ||
self.check_keyword(keywords::Unsafe) || self.check_keyword(keywords::Unsafe) ||
self.check_keyword(keywords::Extern) && self.is_extern_non_path() self.check_keyword(keywords::Extern) && self.is_extern_non_path()
} }
/// parse a TyKind::BareFn type: /// parse a TyKind::BareFn type:
pub fn parse_ty_bare_fn(&mut self, generic_params: Vec<GenericParam>) fn parse_ty_bare_fn(&mut self, generic_params: Vec<GenericParam>)
-> PResult<'a, TyKind> { -> PResult<'a, TyKind> {
/* /*
@ -1786,7 +1672,7 @@ impl<'a> Parser<'a> {
return Ok(TyKind::Rptr(opt_lifetime, MutTy { ty: ty, mutbl: mutbl })); return Ok(TyKind::Rptr(opt_lifetime, MutTy { ty: ty, mutbl: mutbl }));
} }
pub fn parse_ptr(&mut self) -> PResult<'a, MutTy> { fn parse_ptr(&mut self) -> PResult<'a, MutTy> {
let mutbl = if self.eat_keyword(keywords::Mut) { let mutbl = if self.eat_keyword(keywords::Mut) {
Mutability::Mutable Mutability::Mutable
} else if self.eat_keyword(keywords::Const) { } else if self.eat_keyword(keywords::Const) {
@ -1819,7 +1705,7 @@ impl<'a> Parser<'a> {
/// This version of parse arg doesn't necessarily require /// This version of parse arg doesn't necessarily require
/// identifier names. /// identifier names.
pub fn parse_arg_general(&mut self, require_name: bool) -> PResult<'a, Arg> { fn parse_arg_general(&mut self, require_name: bool) -> PResult<'a, Arg> {
maybe_whole!(self, NtArg, |x| x); maybe_whole!(self, NtArg, |x| x);
let (pat, ty) = if require_name || self.is_named_argument() { let (pat, ty) = if require_name || self.is_named_argument() {
@ -1849,12 +1735,12 @@ impl<'a> Parser<'a> {
} }
/// Parse a single function argument /// Parse a single function argument
pub fn parse_arg(&mut self) -> PResult<'a, Arg> { crate fn parse_arg(&mut self) -> PResult<'a, Arg> {
self.parse_arg_general(true) self.parse_arg_general(true)
} }
/// Parse an argument in a lambda header e.g. |arg, arg| /// Parse an argument in a lambda header e.g. |arg, arg|
pub fn parse_fn_block_arg(&mut self) -> PResult<'a, Arg> { fn parse_fn_block_arg(&mut self) -> PResult<'a, Arg> {
let pat = self.parse_pat()?; let pat = self.parse_pat()?;
let t = if self.eat(&token::Colon) { let t = if self.eat(&token::Colon) {
self.parse_ty()? self.parse_ty()?
@ -1872,7 +1758,7 @@ impl<'a> Parser<'a> {
}) })
} }
pub fn maybe_parse_fixed_length_of_vec(&mut self) -> PResult<'a, Option<P<ast::Expr>>> { fn maybe_parse_fixed_length_of_vec(&mut self) -> PResult<'a, Option<P<ast::Expr>>> {
if self.eat(&token::Semi) { if self.eat(&token::Semi) {
Ok(Some(self.parse_expr()?)) Ok(Some(self.parse_expr()?))
} else { } else {
@ -1881,7 +1767,7 @@ impl<'a> Parser<'a> {
} }
/// Matches token_lit = LIT_INTEGER | ... /// Matches token_lit = LIT_INTEGER | ...
pub fn parse_lit_token(&mut self) -> PResult<'a, LitKind> { fn parse_lit_token(&mut self) -> PResult<'a, LitKind> {
let out = match self.token { let out = match self.token {
token::Interpolated(ref nt) => match nt.0 { token::Interpolated(ref nt) => match nt.0 {
token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node { token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node {
@ -1909,7 +1795,7 @@ impl<'a> Parser<'a> {
} }
/// Matches lit = true | false | token_lit /// Matches lit = true | false | token_lit
pub fn parse_lit(&mut self) -> PResult<'a, Lit> { crate fn parse_lit(&mut self) -> PResult<'a, Lit> {
let lo = self.span; let lo = self.span;
let lit = if self.eat_keyword(keywords::True) { let lit = if self.eat_keyword(keywords::True) {
LitKind::Bool(true) LitKind::Bool(true)
@ -1923,7 +1809,7 @@ impl<'a> Parser<'a> {
} }
/// matches '-' lit | lit (cf. ast_validation::AstValidator::check_expr_within_pat) /// matches '-' lit | lit (cf. ast_validation::AstValidator::check_expr_within_pat)
pub fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> { crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
maybe_whole_expr!(self); maybe_whole_expr!(self);
let minus_lo = self.span; let minus_lo = self.span;
@ -1942,7 +1828,7 @@ impl<'a> Parser<'a> {
} }
} }
pub fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> { fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> {
match self.token { match self.token {
token::Ident(ident, _) if self.token.is_path_segment_keyword() => { token::Ident(ident, _) if self.token.is_path_segment_keyword() => {
let span = self.span; let span = self.span;
@ -2000,11 +1886,11 @@ impl<'a> Parser<'a> {
/// `a::b::C::<D>` (with disambiguator) /// `a::b::C::<D>` (with disambiguator)
/// `Fn(Args)` (without disambiguator) /// `Fn(Args)` (without disambiguator)
/// `Fn::(Args)` (with disambiguator) /// `Fn::(Args)` (with disambiguator)
pub fn parse_path(&mut self, style: PathStyle) -> PResult<'a, ast::Path> { crate fn parse_path(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
self.parse_path_common(style, true) self.parse_path_common(style, true)
} }
pub fn parse_path_common(&mut self, style: PathStyle, enable_warning: bool) crate fn parse_path_common(&mut self, style: PathStyle, enable_warning: bool)
-> PResult<'a, ast::Path> { -> PResult<'a, ast::Path> {
maybe_whole!(self, NtPath, |path| { maybe_whole!(self, NtPath, |path| {
if style == PathStyle::Mod && if style == PathStyle::Mod &&
@ -2114,13 +2000,13 @@ impl<'a> Parser<'a> {
}) })
} }
pub fn check_lifetime(&mut self) -> bool { crate fn check_lifetime(&mut self) -> bool {
self.expected_tokens.push(TokenType::Lifetime); self.expected_tokens.push(TokenType::Lifetime);
self.token.is_lifetime() self.token.is_lifetime()
} }
/// Parse single lifetime 'a or panic. /// Parse single lifetime 'a or panic.
pub fn expect_lifetime(&mut self) -> Lifetime { crate fn expect_lifetime(&mut self) -> Lifetime {
if let Some(ident) = self.token.lifetime() { if let Some(ident) = self.token.lifetime() {
let span = self.span; let span = self.span;
self.bump(); self.bump();
@ -2149,7 +2035,7 @@ impl<'a> Parser<'a> {
} }
} }
pub fn parse_field_name(&mut self) -> PResult<'a, Ident> { fn parse_field_name(&mut self) -> PResult<'a, Ident> {
if let token::Literal(token::Integer(name), None) = self.token { if let token::Literal(token::Integer(name), None) = self.token {
self.bump(); self.bump();
Ok(Ident::new(name, self.prev_span)) Ok(Ident::new(name, self.prev_span))
@ -2159,7 +2045,7 @@ impl<'a> Parser<'a> {
} }
/// Parse ident (COLON expr)? /// Parse ident (COLON expr)?
pub fn parse_field(&mut self) -> PResult<'a, Field> { fn parse_field(&mut self) -> PResult<'a, Field> {
let attrs = self.parse_outer_attributes()?; let attrs = self.parse_outer_attributes()?;
let lo = self.span; let lo = self.span;
@ -2185,27 +2071,27 @@ impl<'a> Parser<'a> {
}) })
} }
pub fn mk_expr(&mut self, span: Span, node: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> { fn mk_expr(&mut self, span: Span, node: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> {
P(Expr { node, span, attrs, id: ast::DUMMY_NODE_ID }) P(Expr { node, span, attrs, id: ast::DUMMY_NODE_ID })
} }
pub fn mk_unary(&mut self, unop: ast::UnOp, expr: P<Expr>) -> ast::ExprKind { fn mk_unary(&mut self, unop: ast::UnOp, expr: P<Expr>) -> ast::ExprKind {
ExprKind::Unary(unop, expr) ExprKind::Unary(unop, expr)
} }
pub fn mk_binary(&mut self, binop: ast::BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind { fn mk_binary(&mut self, binop: ast::BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind {
ExprKind::Binary(binop, lhs, rhs) ExprKind::Binary(binop, lhs, rhs)
} }
pub fn mk_call(&mut self, f: P<Expr>, args: Vec<P<Expr>>) -> ast::ExprKind { fn mk_call(&mut self, f: P<Expr>, args: Vec<P<Expr>>) -> ast::ExprKind {
ExprKind::Call(f, args) ExprKind::Call(f, args)
} }
pub fn mk_index(&mut self, expr: P<Expr>, idx: P<Expr>) -> ast::ExprKind { fn mk_index(&mut self, expr: P<Expr>, idx: P<Expr>) -> ast::ExprKind {
ExprKind::Index(expr, idx) ExprKind::Index(expr, idx)
} }
pub fn mk_range(&mut self, fn mk_range(&mut self,
start: Option<P<Expr>>, start: Option<P<Expr>>,
end: Option<P<Expr>>, end: Option<P<Expr>>,
limits: RangeLimits) limits: RangeLimits)
@ -2217,12 +2103,12 @@ impl<'a> Parser<'a> {
} }
} }
pub fn mk_assign_op(&mut self, binop: ast::BinOp, fn mk_assign_op(&mut self, binop: ast::BinOp,
lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind { lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind {
ExprKind::AssignOp(binop, lhs, rhs) ExprKind::AssignOp(binop, lhs, rhs)
} }
pub fn mk_mac_expr(&mut self, span: Span, m: Mac_, attrs: ThinVec<Attribute>) -> P<Expr> { fn mk_mac_expr(&mut self, span: Span, m: Mac_, attrs: ThinVec<Attribute>) -> P<Expr> {
P(Expr { P(Expr {
id: ast::DUMMY_NODE_ID, id: ast::DUMMY_NODE_ID,
node: ExprKind::Mac(codemap::Spanned {node: m, span: span}), node: ExprKind::Mac(codemap::Spanned {node: m, span: span}),
@ -2231,21 +2117,6 @@ impl<'a> Parser<'a> {
}) })
} }
pub fn mk_lit_u32(&mut self, i: u32, attrs: ThinVec<Attribute>) -> P<Expr> {
let span = &self.span;
let lv_lit = P(codemap::Spanned {
node: LitKind::Int(i as u128, ast::LitIntType::Unsigned(UintTy::U32)),
span: *span
});
P(Expr {
id: ast::DUMMY_NODE_ID,
node: ExprKind::Lit(lv_lit),
span: *span,
attrs,
})
}
fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, ThinTokenStream)> { fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, ThinTokenStream)> {
let delim = match self.token { let delim = match self.token {
token::OpenDelim(delim) => delim, token::OpenDelim(delim) => delim,
@ -2598,7 +2469,7 @@ impl<'a> Parser<'a> {
} }
/// Parse a block or unsafe block /// Parse a block or unsafe block
pub fn parse_block_expr(&mut self, opt_label: Option<Label>, fn parse_block_expr(&mut self, opt_label: Option<Label>,
lo: Span, blk_mode: BlockCheckMode, lo: Span, blk_mode: BlockCheckMode,
outer_attrs: ThinVec<Attribute>) outer_attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>> { -> PResult<'a, P<Expr>> {
@ -2612,7 +2483,7 @@ impl<'a> Parser<'a> {
} }
/// parse a.b or a(13) or a[4] or just a /// parse a.b or a(13) or a[4] or just a
pub fn parse_dot_or_call_expr(&mut self, fn parse_dot_or_call_expr(&mut self,
already_parsed_attrs: Option<ThinVec<Attribute>>) already_parsed_attrs: Option<ThinVec<Attribute>>)
-> PResult<'a, P<Expr>> { -> PResult<'a, P<Expr>> {
let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?; let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
@ -2622,7 +2493,7 @@ impl<'a> Parser<'a> {
self.parse_dot_or_call_expr_with(b, span, attrs) self.parse_dot_or_call_expr_with(b, span, attrs)
} }
pub fn parse_dot_or_call_expr_with(&mut self, fn parse_dot_or_call_expr_with(&mut self,
e0: P<Expr>, e0: P<Expr>,
lo: Span, lo: Span,
mut attrs: ThinVec<Attribute>) mut attrs: ThinVec<Attribute>)
@ -2776,7 +2647,7 @@ impl<'a> Parser<'a> {
return Ok(e); return Ok(e);
} }
pub fn process_potential_macro_variable(&mut self) { crate fn process_potential_macro_variable(&mut self) {
let (token, span) = match self.token { let (token, span) = match self.token {
token::Dollar if self.span.ctxt() != syntax_pos::hygiene::SyntaxContext::empty() && token::Dollar if self.span.ctxt() != syntax_pos::hygiene::SyntaxContext::empty() &&
self.look_ahead(1, |t| t.is_ident()) => { self.look_ahead(1, |t| t.is_ident()) => {
@ -2807,7 +2678,7 @@ impl<'a> Parser<'a> {
} }
/// parse a single token tree from the input. /// parse a single token tree from the input.
pub fn parse_token_tree(&mut self) -> TokenTree { crate fn parse_token_tree(&mut self) -> TokenTree {
match self.token { match self.token {
token::OpenDelim(..) => { token::OpenDelim(..) => {
let frame = mem::replace(&mut self.token_cursor.frame, let frame = mem::replace(&mut self.token_cursor.frame,
@ -2850,7 +2721,7 @@ impl<'a> Parser<'a> {
} }
/// Parse a prefix-unary-operator expr /// Parse a prefix-unary-operator expr
pub fn parse_prefix_expr(&mut self, fn parse_prefix_expr(&mut self,
already_parsed_attrs: Option<ThinVec<Attribute>>) already_parsed_attrs: Option<ThinVec<Attribute>>)
-> PResult<'a, P<Expr>> { -> PResult<'a, P<Expr>> {
let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?; let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
@ -2969,14 +2840,14 @@ impl<'a> Parser<'a> {
/// ///
/// This parses an expression accounting for associativity and precedence of the operators in /// This parses an expression accounting for associativity and precedence of the operators in
/// the expression. /// the expression.
pub fn parse_assoc_expr(&mut self, fn parse_assoc_expr(&mut self,
already_parsed_attrs: Option<ThinVec<Attribute>>) already_parsed_attrs: Option<ThinVec<Attribute>>)
-> PResult<'a, P<Expr>> { -> PResult<'a, P<Expr>> {
self.parse_assoc_expr_with(0, already_parsed_attrs.into()) self.parse_assoc_expr_with(0, already_parsed_attrs.into())
} }
/// Parse an associative expression with operators of at least `min_prec` precedence /// Parse an associative expression with operators of at least `min_prec` precedence
pub fn parse_assoc_expr_with(&mut self, fn parse_assoc_expr_with(&mut self,
min_prec: usize, min_prec: usize,
lhs: LhsExpr) lhs: LhsExpr)
-> PResult<'a, P<Expr>> { -> PResult<'a, P<Expr>> {
@ -3305,7 +3176,7 @@ impl<'a> Parser<'a> {
} }
/// Parse an 'if' or 'if let' expression ('if' token already eaten) /// Parse an 'if' or 'if let' expression ('if' token already eaten)
pub fn parse_if_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { fn parse_if_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
if self.check_keyword(keywords::Let) { if self.check_keyword(keywords::Let) {
return self.parse_if_let_expr(attrs); return self.parse_if_let_expr(attrs);
} }
@ -3341,7 +3212,7 @@ impl<'a> Parser<'a> {
} }
/// Parse an 'if let' expression ('if' token already eaten) /// Parse an 'if let' expression ('if' token already eaten)
pub fn parse_if_let_expr(&mut self, attrs: ThinVec<Attribute>) fn parse_if_let_expr(&mut self, attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>> { -> PResult<'a, P<Expr>> {
let lo = self.prev_span; let lo = self.prev_span;
self.expect_keyword(keywords::Let)?; self.expect_keyword(keywords::Let)?;
@ -3359,7 +3230,7 @@ impl<'a> Parser<'a> {
} }
// `move |args| expr` // `move |args| expr`
pub fn parse_lambda_expr(&mut self, fn parse_lambda_expr(&mut self,
attrs: ThinVec<Attribute>) attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>> -> PResult<'a, P<Expr>>
{ {
@ -3396,7 +3267,7 @@ impl<'a> Parser<'a> {
} }
// `else` token already eaten // `else` token already eaten
pub fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> { fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> {
if self.eat_keyword(keywords::If) { if self.eat_keyword(keywords::If) {
return self.parse_if_expr(ThinVec::new()); return self.parse_if_expr(ThinVec::new());
} else { } else {
@ -3406,7 +3277,7 @@ impl<'a> Parser<'a> {
} }
/// Parse a 'for' .. 'in' expression ('for' token already eaten) /// Parse a 'for' .. 'in' expression ('for' token already eaten)
pub fn parse_for_expr(&mut self, opt_label: Option<Label>, fn parse_for_expr(&mut self, opt_label: Option<Label>,
span_lo: Span, span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
// Parse: `for <src_pat> in <src_expr> <src_loop_block>` // Parse: `for <src_pat> in <src_expr> <src_loop_block>`
@ -3432,7 +3303,7 @@ impl<'a> Parser<'a> {
} }
/// Parse a 'while' or 'while let' expression ('while' token already eaten) /// Parse a 'while' or 'while let' expression ('while' token already eaten)
pub fn parse_while_expr(&mut self, opt_label: Option<Label>, fn parse_while_expr(&mut self, opt_label: Option<Label>,
span_lo: Span, span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
if self.token.is_keyword(keywords::Let) { if self.token.is_keyword(keywords::Let) {
@ -3446,7 +3317,7 @@ impl<'a> Parser<'a> {
} }
/// Parse a 'while let' expression ('while' token already eaten) /// Parse a 'while let' expression ('while' token already eaten)
pub fn parse_while_let_expr(&mut self, opt_label: Option<Label>, fn parse_while_let_expr(&mut self, opt_label: Option<Label>,
span_lo: Span, span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
self.expect_keyword(keywords::Let)?; self.expect_keyword(keywords::Let)?;
@ -3460,7 +3331,7 @@ impl<'a> Parser<'a> {
} }
// parse `loop {...}`, `loop` token already eaten // parse `loop {...}`, `loop` token already eaten
pub fn parse_loop_expr(&mut self, opt_label: Option<Label>, fn parse_loop_expr(&mut self, opt_label: Option<Label>,
span_lo: Span, span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
let (iattrs, body) = self.parse_inner_attrs_and_block()?; let (iattrs, body) = self.parse_inner_attrs_and_block()?;
@ -3470,7 +3341,7 @@ impl<'a> Parser<'a> {
} }
/// Parse a `do catch {...}` expression (`do catch` token already eaten) /// Parse a `do catch {...}` expression (`do catch` token already eaten)
pub fn parse_catch_expr(&mut self, span_lo: Span, mut attrs: ThinVec<Attribute>) fn parse_catch_expr(&mut self, span_lo: Span, mut attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>> -> PResult<'a, P<Expr>>
{ {
let (iattrs, body) = self.parse_inner_attrs_and_block()?; let (iattrs, body) = self.parse_inner_attrs_and_block()?;
@ -3518,7 +3389,7 @@ impl<'a> Parser<'a> {
return Ok(self.mk_expr(lo.to(hi), ExprKind::Match(discriminant, arms), attrs)); return Ok(self.mk_expr(lo.to(hi), ExprKind::Match(discriminant, arms), attrs));
} }
pub fn parse_arm(&mut self) -> PResult<'a, Arm> { crate fn parse_arm(&mut self) -> PResult<'a, Arm> {
maybe_whole!(self, NtArm, |x| x); maybe_whole!(self, NtArm, |x| x);
let attrs = self.parse_outer_attributes()?; let attrs = self.parse_outer_attributes()?;
@ -3597,7 +3468,7 @@ impl<'a> Parser<'a> {
/// Evaluate the closure with restrictions in place. /// Evaluate the closure with restrictions in place.
/// ///
/// After the closure is evaluated, restrictions are reset. /// After the closure is evaluated, restrictions are reset.
pub fn with_res<F, T>(&mut self, r: Restrictions, f: F) -> T fn with_res<F, T>(&mut self, r: Restrictions, f: F) -> T
where F: FnOnce(&mut Self) -> T where F: FnOnce(&mut Self) -> T
{ {
let old = self.restrictions; let old = self.restrictions;
@ -3609,7 +3480,7 @@ impl<'a> Parser<'a> {
} }
/// Parse an expression, subject to the given restrictions /// Parse an expression, subject to the given restrictions
pub fn parse_expr_res(&mut self, r: Restrictions, fn parse_expr_res(&mut self, r: Restrictions,
already_parsed_attrs: Option<ThinVec<Attribute>>) already_parsed_attrs: Option<ThinVec<Attribute>>)
-> PResult<'a, P<Expr>> { -> PResult<'a, P<Expr>> {
self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs)) self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs))
@ -3953,7 +3824,7 @@ impl<'a> Parser<'a> {
/// A wrapper around `parse_pat` with some special error handling for the /// A wrapper around `parse_pat` with some special error handling for the
/// "top-level" patterns in a match arm, `for` loop, `let`, &c. (in contast /// "top-level" patterns in a match arm, `for` loop, `let`, &c. (in contast
/// to subpatterns within such). /// to subpatterns within such).
pub fn parse_top_level_pat(&mut self) -> PResult<'a, P<Pat>> { fn parse_top_level_pat(&mut self) -> PResult<'a, P<Pat>> {
let pat = self.parse_pat()?; let pat = self.parse_pat()?;
if self.token == token::Comma { if self.token == token::Comma {
// An unexpected comma after a top-level pattern is a clue that the // An unexpected comma after a top-level pattern is a clue that the
@ -4701,7 +4572,7 @@ impl<'a> Parser<'a> {
} }
/// Parse a block. No inner attrs are allowed. /// Parse a block. No inner attrs are allowed.
pub fn parse_block(&mut self) -> PResult<'a, P<Block>> { crate fn parse_block(&mut self) -> PResult<'a, P<Block>> {
maybe_whole!(self, NtBlock, |x| x); maybe_whole!(self, NtBlock, |x| x);
let lo = self.span; let lo = self.span;
@ -4802,7 +4673,7 @@ impl<'a> Parser<'a> {
} }
/// Parse a statement, including the trailing semicolon. /// Parse a statement, including the trailing semicolon.
pub fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> { crate fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> {
// skip looking for a trailing semicolon when we have an interpolated statement // skip looking for a trailing semicolon when we have an interpolated statement
maybe_whole!(self, NtStmt, |x| Some(x)); maybe_whole!(self, NtStmt, |x| Some(x));
@ -4991,7 +4862,7 @@ impl<'a> Parser<'a> {
/// Parses (possibly empty) list of lifetime and type parameters, possibly including /// Parses (possibly empty) list of lifetime and type parameters, possibly including
/// trailing comma and erroneous trailing attributes. /// trailing comma and erroneous trailing attributes.
pub fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> { crate fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> {
let mut params = Vec::new(); let mut params = Vec::new();
let mut seen_ty_param = false; let mut seen_ty_param = false;
loop { loop {
@ -5041,7 +4912,7 @@ impl<'a> Parser<'a> {
/// matches generics = ( ) | ( < > ) | ( < typaramseq ( , )? > ) | ( < lifetimes ( , )? > ) /// matches generics = ( ) | ( < > ) | ( < typaramseq ( , )? > ) | ( < lifetimes ( , )? > )
/// | ( < lifetimes , typaramseq ( , )? > ) /// | ( < lifetimes , typaramseq ( , )? > )
/// where typaramseq = ( typaram ) | ( typaram , typaramseq ) /// where typaramseq = ( typaram ) | ( typaram , typaramseq )
pub fn parse_generics(&mut self) -> PResult<'a, ast::Generics> { fn parse_generics(&mut self) -> PResult<'a, ast::Generics> {
maybe_whole!(self, NtGenerics, |x| x); maybe_whole!(self, NtGenerics, |x| x);
let span_lo = self.span; let span_lo = self.span;
@ -5115,7 +4986,7 @@ impl<'a> Parser<'a> {
/// ```ignore (only-for-syntax-highlight) /// ```ignore (only-for-syntax-highlight)
/// where T : Trait<U, V> + 'b, 'a : 'b /// where T : Trait<U, V> + 'b, 'a : 'b
/// ``` /// ```
pub fn parse_where_clause(&mut self) -> PResult<'a, WhereClause> { fn parse_where_clause(&mut self) -> PResult<'a, WhereClause> {
maybe_whole!(self, NtWhereClause, |x| x); maybe_whole!(self, NtWhereClause, |x| x);
let mut where_clause = WhereClause { let mut where_clause = WhereClause {
@ -5266,7 +5137,7 @@ impl<'a> Parser<'a> {
} }
/// Parse the argument list and result type of a function declaration /// Parse the argument list and result type of a function declaration
pub fn parse_fn_decl(&mut self, allow_variadic: bool) -> PResult<'a, P<FnDecl>> { fn parse_fn_decl(&mut self, allow_variadic: bool) -> PResult<'a, P<FnDecl>> {
let (args, variadic) = self.parse_fn_args(true, allow_variadic)?; let (args, variadic) = self.parse_fn_args(true, allow_variadic)?;
let ret_ty = self.parse_ret_ty(true)?; let ret_ty = self.parse_ret_ty(true)?;
@ -5478,7 +5349,7 @@ impl<'a> Parser<'a> {
} }
/// true if we are looking at `const ID`, false for things like `const fn` etc /// true if we are looking at `const ID`, false for things like `const fn` etc
pub fn is_const_item(&mut self) -> bool { fn is_const_item(&mut self) -> bool {
self.token.is_keyword(keywords::Const) && self.token.is_keyword(keywords::Const) &&
!self.look_ahead(1, |t| t.is_keyword(keywords::Fn)) && !self.look_ahead(1, |t| t.is_keyword(keywords::Fn)) &&
!self.look_ahead(1, |t| t.is_keyword(keywords::Unsafe)) !self.look_ahead(1, |t| t.is_keyword(keywords::Unsafe))
@ -5492,7 +5363,7 @@ impl<'a> Parser<'a> {
/// - `const unsafe fn` /// - `const unsafe fn`
/// - `extern fn` /// - `extern fn`
/// - etc /// - etc
pub fn parse_fn_front_matter(&mut self) -> PResult<'a, (Spanned<Constness>, Unsafety, Abi)> { fn parse_fn_front_matter(&mut self) -> PResult<'a, (Spanned<Constness>, Unsafety, Abi)> {
let is_const_fn = self.eat_keyword(keywords::Const); let is_const_fn = self.eat_keyword(keywords::Const);
let const_span = self.prev_span; let const_span = self.prev_span;
let unsafety = self.parse_unsafety(); let unsafety = self.parse_unsafety();
@ -5511,7 +5382,7 @@ impl<'a> Parser<'a> {
} }
/// Parse an impl item. /// Parse an impl item.
pub fn parse_impl_item(&mut self, at_end: &mut bool) -> PResult<'a, ImplItem> { crate fn parse_impl_item(&mut self, at_end: &mut bool) -> PResult<'a, ImplItem> {
maybe_whole!(self, NtImplItem, |x| x); maybe_whole!(self, NtImplItem, |x| x);
let attrs = self.parse_outer_attributes()?; let attrs = self.parse_outer_attributes()?;
let (mut item, tokens) = self.collect_tokens(|this| { let (mut item, tokens) = self.collect_tokens(|this| {
@ -5931,7 +5802,7 @@ impl<'a> Parser<'a> {
} }
} }
pub fn parse_record_struct_body(&mut self) -> PResult<'a, Vec<StructField>> { fn parse_record_struct_body(&mut self) -> PResult<'a, Vec<StructField>> {
let mut fields = Vec::new(); let mut fields = Vec::new();
if self.eat(&token::OpenDelim(token::Brace)) { if self.eat(&token::OpenDelim(token::Brace)) {
while self.token != token::CloseDelim(token::Brace) { while self.token != token::CloseDelim(token::Brace) {
@ -5958,7 +5829,7 @@ impl<'a> Parser<'a> {
Ok(fields) Ok(fields)
} }
pub fn parse_tuple_struct_body(&mut self) -> PResult<'a, Vec<StructField>> { fn parse_tuple_struct_body(&mut self) -> PResult<'a, Vec<StructField>> {
// This is the case where we find `struct Foo<T>(T) where T: Copy;` // This is the case where we find `struct Foo<T>(T) where T: Copy;`
// Unit like structs are handled in parse_item_struct function // Unit like structs are handled in parse_item_struct function
let fields = self.parse_unspanned_seq( let fields = self.parse_unspanned_seq(
@ -5984,7 +5855,7 @@ impl<'a> Parser<'a> {
} }
/// Parse a structure field declaration /// Parse a structure field declaration
pub fn parse_single_struct_field(&mut self, fn parse_single_struct_field(&mut self,
lo: Span, lo: Span,
vis: Visibility, vis: Visibility,
attrs: Vec<Attribute> ) attrs: Vec<Attribute> )
@ -7026,7 +6897,7 @@ impl<'a> Parser<'a> {
} }
/// Parse a foreign item. /// Parse a foreign item.
pub fn parse_foreign_item(&mut self) -> PResult<'a, Option<ForeignItem>> { crate fn parse_foreign_item(&mut self) -> PResult<'a, Option<ForeignItem>> {
maybe_whole!(self, NtForeignItem, |ni| Some(ni)); maybe_whole!(self, NtForeignItem, |ni| Some(ni));
let attrs = self.parse_outer_attributes()?; let attrs = self.parse_outer_attributes()?;
@ -7328,7 +7199,7 @@ impl<'a> Parser<'a> {
}) })
} }
pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> { fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
let ret = match self.token { let ret = match self.token {
token::Literal(token::Str_(s), suf) => (s, ast::StrStyle::Cooked, suf), token::Literal(token::Str_(s), suf) => (s, ast::StrStyle::Cooked, suf),
token::Literal(token::StrRaw(s, n), suf) => (s, ast::StrStyle::Raw(n), suf), token::Literal(token::StrRaw(s, n), suf) => (s, ast::StrStyle::Raw(n), suf),

View file

@ -80,7 +80,7 @@ pub enum Lit {
} }
impl Lit { impl Lit {
pub fn short_name(&self) -> &'static str { crate fn short_name(&self) -> &'static str {
match *self { match *self {
Byte(_) => "byte", Byte(_) => "byte",
Char(_) => "char", Char(_) => "char",
@ -217,15 +217,7 @@ impl Token {
Ident(ident, ident.is_raw_guess()) Ident(ident, ident.is_raw_guess())
} }
/// Returns `true` if the token starts with '>'. crate fn is_like_plus(&self) -> bool {
pub fn is_like_gt(&self) -> bool {
match *self {
BinOp(Shr) | BinOpEq(Shr) | Gt | Ge => true,
_ => false,
}
}
pub fn is_like_plus(&self) -> bool {
match *self { match *self {
BinOp(Plus) | BinOpEq(Plus) => true, BinOp(Plus) | BinOpEq(Plus) => true,
_ => false, _ => false,
@ -233,7 +225,7 @@ impl Token {
} }
/// Returns `true` if the token can appear at the start of an expression. /// Returns `true` if the token can appear at the start of an expression.
pub fn can_begin_expr(&self) -> bool { crate fn can_begin_expr(&self) -> bool {
match *self { match *self {
Ident(ident, is_raw) => Ident(ident, is_raw) =>
ident_can_begin_expr(ident, is_raw), // value name or keyword ident_can_begin_expr(ident, is_raw), // value name or keyword
@ -265,7 +257,7 @@ impl Token {
} }
/// Returns `true` if the token can appear at the start of a type. /// Returns `true` if the token can appear at the start of a type.
pub fn can_begin_type(&self) -> bool { crate fn can_begin_type(&self) -> bool {
match *self { match *self {
Ident(ident, is_raw) => Ident(ident, is_raw) =>
ident_can_begin_type(ident, is_raw), // type name or keyword ident_can_begin_type(ident, is_raw), // type name or keyword
@ -288,13 +280,13 @@ impl Token {
} }
/// Returns `true` if the token can appear at the start of a generic bound. /// Returns `true` if the token can appear at the start of a generic bound.
pub fn can_begin_bound(&self) -> bool { crate fn can_begin_bound(&self) -> bool {
self.is_path_start() || self.is_lifetime() || self.is_keyword(keywords::For) || self.is_path_start() || self.is_lifetime() || self.is_keyword(keywords::For) ||
self == &Question || self == &OpenDelim(Paren) self == &Question || self == &OpenDelim(Paren)
} }
/// Returns `true` if the token is any literal /// Returns `true` if the token is any literal
pub fn is_lit(&self) -> bool { crate fn is_lit(&self) -> bool {
match *self { match *self {
Literal(..) => true, Literal(..) => true,
_ => false, _ => false,
@ -303,7 +295,7 @@ impl Token {
/// Returns `true` if the token is any literal, a minus (which can follow a literal, /// Returns `true` if the token is any literal, a minus (which can follow a literal,
/// for example a '-42', or one of the boolean idents). /// for example a '-42', or one of the boolean idents).
pub fn can_begin_literal_or_bool(&self) -> bool { crate fn can_begin_literal_or_bool(&self) -> bool {
match *self { match *self {
Literal(..) => true, Literal(..) => true,
BinOp(Minus) => true, BinOp(Minus) => true,
@ -340,37 +332,21 @@ impl Token {
self.ident().is_some() self.ident().is_some()
} }
/// Returns `true` if the token is a lifetime. /// Returns `true` if the token is a lifetime.
pub fn is_lifetime(&self) -> bool { crate fn is_lifetime(&self) -> bool {
self.lifetime().is_some() self.lifetime().is_some()
} }
/// Returns `true` if the token is an identifier whose name is the given /// Returns `true` if the token is an identifier whose name is the given
/// string slice. /// string slice.
pub fn is_ident_named(&self, name: &str) -> bool { crate fn is_ident_named(&self, name: &str) -> bool {
match self.ident() { match self.ident() {
Some((ident, _)) => ident.as_str() == name, Some((ident, _)) => ident.as_str() == name,
None => false None => false
} }
} }
/// Returns `true` if the token is a documentation comment.
pub fn is_doc_comment(&self) -> bool {
    if let DocComment(..) = *self {
        true
    } else {
        false
    }
}
/// Returns `true` if the token is interpolated.
pub fn is_interpolated(&self) -> bool {
    if let Interpolated(..) = *self {
        true
    } else {
        false
    }
}
/// Returns `true` if the token is an interpolated path. /// Returns `true` if the token is an interpolated path.
pub fn is_path(&self) -> bool { fn is_path(&self) -> bool {
if let Interpolated(ref nt) = *self { if let Interpolated(ref nt) = *self {
if let NtPath(..) = nt.0 { if let NtPath(..) = nt.0 {
return true; return true;
@ -380,16 +356,16 @@ impl Token {
} }
/// Returns `true` if the token is either the `mut` or `const` keyword. /// Returns `true` if the token is either the `mut` or `const` keyword.
pub fn is_mutability(&self) -> bool { crate fn is_mutability(&self) -> bool {
self.is_keyword(keywords::Mut) || self.is_keyword(keywords::Mut) ||
self.is_keyword(keywords::Const) self.is_keyword(keywords::Const)
} }
pub fn is_qpath_start(&self) -> bool { crate fn is_qpath_start(&self) -> bool {
self == &Lt || self == &BinOp(Shl) self == &Lt || self == &BinOp(Shl)
} }
pub fn is_path_start(&self) -> bool { crate fn is_path_start(&self) -> bool {
self == &ModSep || self.is_qpath_start() || self.is_path() || self == &ModSep || self.is_qpath_start() || self.is_path() ||
self.is_path_segment_keyword() || self.is_ident() && !self.is_reserved_ident() self.is_path_segment_keyword() || self.is_ident() && !self.is_reserved_ident()
} }
@ -416,7 +392,7 @@ impl Token {
} }
/// Returns `true` if the token is a keyword used in the language. /// Returns `true` if the token is a keyword used in the language.
pub fn is_used_keyword(&self) -> bool { crate fn is_used_keyword(&self) -> bool {
match self.ident() { match self.ident() {
Some((id, false)) => id.is_used_keyword(), Some((id, false)) => id.is_used_keyword(),
_ => false, _ => false,
@ -424,7 +400,7 @@ impl Token {
} }
/// Returns `true` if the token is a keyword reserved for possible future use. /// Returns `true` if the token is a keyword reserved for possible future use.
pub fn is_unused_keyword(&self) -> bool { crate fn is_unused_keyword(&self) -> bool {
match self.ident() { match self.ident() {
Some((id, false)) => id.is_unused_keyword(), Some((id, false)) => id.is_unused_keyword(),
_ => false, _ => false,
@ -439,7 +415,7 @@ impl Token {
} }
} }
pub fn glue(self, joint: Token) -> Option<Token> { crate fn glue(self, joint: Token) -> Option<Token> {
Some(match self { Some(match self {
Eq => match joint { Eq => match joint {
Eq => EqEq, Eq => EqEq,
@ -507,7 +483,7 @@ impl Token {
/// Returns tokens that are likely to be typed accidentally instead of the current token. /// Returns tokens that are likely to be typed accidentally instead of the current token.
/// Enables better error recovery when the wrong token is found. /// Enables better error recovery when the wrong token is found.
pub fn similar_tokens(&self) -> Option<Vec<Token>> { crate fn similar_tokens(&self) -> Option<Vec<Token>> {
match *self { match *self {
Comma => Some(vec![Dot, Lt]), Comma => Some(vec![Dot, Lt]),
Semi => Some(vec![Colon]), Semi => Some(vec![Colon]),
@ -603,7 +579,7 @@ impl Token {
// See comments in `interpolated_to_tokenstream` for why we care about // See comments in `interpolated_to_tokenstream` for why we care about
// *probably* equal here rather than actual equality // *probably* equal here rather than actual equality
pub fn probably_equal_for_proc_macro(&self, other: &Token) -> bool { crate fn probably_equal_for_proc_macro(&self, other: &Token) -> bool {
if mem::discriminant(self) != mem::discriminant(other) { if mem::discriminant(self) != mem::discriminant(other) {
return false return false
} }
@ -732,7 +708,7 @@ impl fmt::Debug for Nonterminal {
} }
} }
pub fn is_op(tok: &Token) -> bool { crate fn is_op(tok: &Token) -> bool {
match *tok { match *tok {
OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) | OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
Ident(..) | Lifetime(..) | Interpolated(..) | Ident(..) | Lifetime(..) | Interpolated(..) |
@ -758,11 +734,11 @@ impl fmt::Debug for LazyTokenStream {
} }
impl LazyTokenStream { impl LazyTokenStream {
pub fn new() -> Self { fn new() -> Self {
LazyTokenStream(Lock::new(None)) LazyTokenStream(Lock::new(None))
} }
pub fn force<F: FnOnce() -> TokenStream>(&self, f: F) -> TokenStream { fn force<F: FnOnce() -> TokenStream>(&self, f: F) -> TokenStream {
let mut opt_stream = self.0.lock(); let mut opt_stream = self.0.lock();
if opt_stream.is_none() { if opt_stream.is_none() {
*opt_stream = Some(f()); *opt_stream = Some(f());