libsyntax: Fix snake_case errors.
A number of functions/methods have been moved or renamed to align better
with Rust's standard naming conventions:

    syntax::ext::mtwt::xorPush     => xor_push
    syntax::parse::parser::Parser  => Parser::new

[breaking-change]
parent 16f15ce391
commit 190d8bdbc6

8 changed files with 73 additions and 68 deletions
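The diff below shows the libsyntax change itself. As background, the pattern being applied is the usual fix for a non_snake_case constructor: replace a free function named after the type with an associated `new` function. The following is a self-contained illustration of that pattern only; `Counter` is a made-up type, not code from this commit.

// Illustration only: `Counter` is hypothetical, not a libsyntax type.
// The commit applies this same pattern to `Parser`.
struct Counter {
    count: u32,
}

// Old convention (what the lint flags): a free constructor function that
// shadows the type name, called as `Counter(0)`.
//
//     #[allow(non_snake_case)]
//     fn Counter(start: u32) -> Counter { Counter { count: start } }

// New convention: an associated `new` function, called as `Counter::new(0)`.
impl Counter {
    fn new(start: u32) -> Counter {
        Counter { count: start }
    }
}

fn main() {
    let c = Counter::new(0); // call sites change from `Counter(0)` to `Counter::new(0)`
    println!("count = {}", c.count);
}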
@@ -278,50 +278,11 @@ struct ParsedItemsAndViewItems {
     attrs_remaining: Vec<Attribute> ,
     view_items: Vec<ViewItem> ,
     items: Vec<@Item> ,
-    foreign_items: Vec<@ForeignItem> }
+    foreign_items: Vec<@ForeignItem>
+}

 /* ident is handled by common.rs */

-pub fn Parser<'a>(
-                  sess: &'a ParseSess,
-                  cfg: ast::CrateConfig,
-                  mut rdr: Box<Reader:>)
-                  -> Parser<'a> {
-    let tok0 = rdr.next_token();
-    let span = tok0.sp;
-    let placeholder = TokenAndSpan {
-        tok: token::UNDERSCORE,
-        sp: span,
-    };
-
-    Parser {
-        reader: rdr,
-        interner: token::get_ident_interner(),
-        sess: sess,
-        cfg: cfg,
-        token: tok0.tok,
-        span: span,
-        last_span: span,
-        last_token: None,
-        buffer: [
-            placeholder.clone(),
-            placeholder.clone(),
-            placeholder.clone(),
-            placeholder.clone(),
-        ],
-        buffer_start: 0,
-        buffer_end: 0,
-        tokens_consumed: 0,
-        restriction: UNRESTRICTED,
-        quote_depth: 0,
-        obsolete_set: HashSet::new(),
-        mod_path_stack: Vec::new(),
-        open_braces: Vec::new(),
-        owns_directory: true,
-        root_module_name: None,
-    }
-}
-
 pub struct Parser<'a> {
     pub sess: &'a ParseSess,
     // the current token:
@@ -362,6 +323,41 @@ fn is_plain_ident_or_underscore(t: &token::Token) -> bool {
 }

 impl<'a> Parser<'a> {
+    pub fn new(sess: &'a ParseSess, cfg: ast::CrateConfig, mut rdr: Box<Reader:>) -> Parser<'a> {
+        let tok0 = rdr.next_token();
+        let span = tok0.sp;
+        let placeholder = TokenAndSpan {
+            tok: token::UNDERSCORE,
+            sp: span,
+        };
+
+        Parser {
+            reader: rdr,
+            interner: token::get_ident_interner(),
+            sess: sess,
+            cfg: cfg,
+            token: tok0.tok,
+            span: span,
+            last_span: span,
+            last_token: None,
+            buffer: [
+                placeholder.clone(),
+                placeholder.clone(),
+                placeholder.clone(),
+                placeholder.clone(),
+            ],
+            buffer_start: 0,
+            buffer_end: 0,
+            tokens_consumed: 0,
+            restriction: UNRESTRICTED,
+            quote_depth: 0,
+            obsolete_set: HashSet::new(),
+            mod_path_stack: Vec::new(),
+            open_braces: Vec::new(),
+            owns_directory: true,
+            root_module_name: None,
+        }
+    }
     // convert a token to a string using self's reader
     pub fn token_to_str(token: &token::Token) -> String {
         token::to_str(token)
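The remaining changed files in this commit are not shown in this excerpt; for a breaking rename like this they would mostly be call-site updates. A hedged sketch of what such an update looks like, using only the constructor signature visible in the diff above; the wrapper function itself is hypothetical:

// Hypothetical wrapper; only the `Parser::new(...)` call reflects this commit.
// `sess`, `cfg`, and `rdr` have the types shown in the diff above.
fn make_parser<'a>(sess: &'a ParseSess,
                   cfg: ast::CrateConfig,
                   rdr: Box<Reader:>) -> Parser<'a> {
    // Before this commit the call was written `Parser(sess, cfg, rdr)`.
    Parser::new(sess, cfg, rdr)
}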