Fix some clippy warnings in libsyntax

This is mostly removing stray ampersands, needless returns and lifetimes.
This commit is contained in:
Andre Bogus 2017-05-12 20:05:39 +02:00
parent e19ccb71c8
commit a9c163ebe9
32 changed files with 498 additions and 537 deletions

View file

@ -107,18 +107,18 @@ pub fn parse_crate_attrs_from_file<'a>(input: &Path, sess: &'a ParseSess)
parser.parse_inner_attributes()
}
pub fn parse_crate_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-> PResult<'a, ast::Crate> {
pub fn parse_crate_from_source_str(name: String, source: String, sess: &ParseSess)
-> PResult<ast::Crate> {
new_parser_from_source_str(sess, name, source).parse_crate_mod()
}
pub fn parse_crate_attrs_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-> PResult<'a, Vec<ast::Attribute>> {
pub fn parse_crate_attrs_from_source_str(name: String, source: String, sess: &ParseSess)
-> PResult<Vec<ast::Attribute>> {
new_parser_from_source_str(sess, name, source).parse_inner_attributes()
}
pub fn parse_expr_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-> PResult<'a, P<ast::Expr>> {
pub fn parse_expr_from_source_str(name: String, source: String, sess: &ParseSess)
-> PResult<P<ast::Expr>> {
new_parser_from_source_str(sess, name, source).parse_expr()
}
@ -126,29 +126,29 @@ pub fn parse_expr_from_source_str<'a>(name: String, source: String, sess: &'a Pa
///
/// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and `Err`
/// when a syntax error occurred.
pub fn parse_item_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-> PResult<'a, Option<P<ast::Item>>> {
pub fn parse_item_from_source_str(name: String, source: String, sess: &ParseSess)
-> PResult<Option<P<ast::Item>>> {
new_parser_from_source_str(sess, name, source).parse_item()
}
pub fn parse_meta_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-> PResult<'a, ast::MetaItem> {
pub fn parse_meta_from_source_str(name: String, source: String, sess: &ParseSess)
-> PResult<ast::MetaItem> {
new_parser_from_source_str(sess, name, source).parse_meta_item()
}
pub fn parse_stmt_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-> PResult<'a, Option<ast::Stmt>> {
pub fn parse_stmt_from_source_str(name: String, source: String, sess: &ParseSess)
-> PResult<Option<ast::Stmt>> {
new_parser_from_source_str(sess, name, source).parse_stmt()
}
pub fn parse_stream_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
pub fn parse_stream_from_source_str(name: String, source: String, sess: &ParseSess)
-> TokenStream {
filemap_to_stream(sess, sess.codemap().new_filemap(name, source))
}
// Create a new parser from a source string
pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess, name: String, source: String)
-> Parser<'a> {
pub fn new_parser_from_source_str(sess: &ParseSess, name: String, source: String)
-> Parser {
filemap_to_parser(sess, sess.codemap().new_filemap(name, source))
}
@ -173,7 +173,7 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
}
/// Given a filemap and config, return a parser
pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc<FileMap>, ) -> Parser<'a> {
pub fn filemap_to_parser(sess: & ParseSess, filemap: Rc<FileMap>, ) -> Parser {
let end_pos = filemap.end_pos;
let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap));
@ -186,7 +186,7 @@ pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc<FileMap>, ) -> Par
// must preserve old name for now, because quote! from the *existing*
// compiler expands into it
pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, tts: Vec<TokenTree>) -> Parser<'a> {
pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser {
stream_to_parser(sess, tts.into_iter().collect())
}
@ -216,8 +216,8 @@ pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc<FileMap>) -> TokenStream
panictry!(srdr.parse_all_token_trees())
}
/// Given stream and the ParseSess, produce a parser
pub fn stream_to_parser<'a>(sess: &'a ParseSess, stream: TokenStream) -> Parser<'a> {
/// Given stream and the `ParseSess`, produce a parser
pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser {
Parser::new(sess, stream, None, false)
}
@ -251,7 +251,7 @@ pub fn char_lit(lit: &str) -> (char, isize) {
(c, 4)
}
'u' => {
assert!(lit.as_bytes()[2] == b'{');
assert_eq!(lit.as_bytes()[2], b'{');
let idx = lit.find('}').unwrap();
let v = u32::from_str_radix(&lit[3..idx], 16).unwrap();
let c = char::from_u32(v).unwrap();
@ -287,51 +287,46 @@ pub fn str_lit(lit: &str) -> String {
}
let mut chars = lit.char_indices().peekable();
loop {
match chars.next() {
Some((i, c)) => {
match c {
'\\' => {
let ch = chars.peek().unwrap_or_else(|| {
panic!("{}", error(i))
}).1;
while let Some((i, c)) = chars.next() {
match c {
'\\' => {
let ch = chars.peek().unwrap_or_else(|| {
panic!("{}", error(i))
}).1;
if ch == '\n' {
eat(&mut chars);
} else if ch == '\r' {
chars.next();
let ch = chars.peek().unwrap_or_else(|| {
panic!("{}", error(i))
}).1;
if ch == '\n' {
eat(&mut chars);
} else if ch == '\r' {
chars.next();
let ch = chars.peek().unwrap_or_else(|| {
panic!("{}", error(i))
}).1;
if ch != '\n' {
panic!("lexer accepted bare CR");
}
eat(&mut chars);
} else {
// otherwise, a normal escape
let (c, n) = char_lit(&lit[i..]);
for _ in 0..n - 1 { // we don't need to move past the first \
chars.next();
}
res.push(c);
}
},
'\r' => {
let ch = chars.peek().unwrap_or_else(|| {
panic!("{}", error(i))
}).1;
if ch != '\n' {
panic!("lexer accepted bare CR");
}
chars.next();
res.push('\n');
if ch != '\n' {
panic!("lexer accepted bare CR");
}
c => res.push(c),
eat(&mut chars);
} else {
// otherwise, a normal escape
let (c, n) = char_lit(&lit[i..]);
for _ in 0..n - 1 { // we don't need to move past the first \
chars.next();
}
res.push(c);
}
},
None => break
'\r' => {
let ch = chars.peek().unwrap_or_else(|| {
panic!("{}", error(i))
}).1;
if ch != '\n' {
panic!("lexer accepted bare CR");
}
chars.next();
res.push('\n');
}
c => res.push(c),
}
}
@ -348,20 +343,15 @@ pub fn raw_str_lit(lit: &str) -> String {
// FIXME #8372: This could be a for-loop if it didn't borrow the iterator
let mut chars = lit.chars().peekable();
loop {
match chars.next() {
Some(c) => {
if c == '\r' {
if *chars.peek().unwrap() != '\n' {
panic!("lexer accepted bare CR");
}
chars.next();
res.push('\n');
} else {
res.push(c);
}
},
None => break
while let Some(c) = chars.next() {
if c == '\r' {
if *chars.peek().unwrap() != '\n' {
panic!("lexer accepted bare CR");
}
chars.next();
res.push('\n');
} else {
res.push(c);
}
}
@ -459,7 +449,7 @@ pub fn byte_lit(lit: &str) -> (u8, usize) {
if lit.len() == 1 {
(lit.as_bytes()[0], 1)
} else {
assert!(lit.as_bytes()[0] == b'\\', err(0));
assert_eq!(lit.as_bytes()[0], b'\\', "{}", err(0));
let b = match lit.as_bytes()[1] {
b'"' => b'"',
b'n' => b'\n',
@ -480,7 +470,7 @@ pub fn byte_lit(lit: &str) -> (u8, usize) {
}
}
};
return (b, 2);
(b, 2)
}
}
@ -491,7 +481,7 @@ pub fn byte_str_lit(lit: &str) -> Rc<Vec<u8>> {
let error = |i| format!("lexer should have rejected {} at {}", lit, i);
/// Eat everything up to a non-whitespace
fn eat<'a, I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<I>) {
fn eat<I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<I>) {
loop {
match it.peek().map(|x| x.1) {
Some(b' ') | Some(b'\n') | Some(b'\r') | Some(b'\t') => {
@ -578,7 +568,7 @@ pub fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler
if let Some(err) = err {
err!(diag, |span, diag| diag.span_err(span, err));
}
return filtered_float_lit(Symbol::intern(&s), Some(suf), diag)
return filtered_float_lit(Symbol::intern(s), Some(suf), diag)
}
}