Omit integer suffix when unnecessary
See PR #21378 for context
parent ca4b9674c2
commit 9683745fed
15 changed files with 142 additions and 142 deletions
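The pattern throughout the diff is the same: wherever the expected type of an integer literal is already fixed by its context (a function returning `usize`, a counter compared against a `usize` length, a `CharPos(..)` argument), the explicit suffix is dropped and type inference supplies it. Below is a minimal, hypothetical sketch of the idea in ordinary code, not taken from this commit; the obsolete `us` suffix appears only in a comment for comparison, and the digit-counting loop mirrors one of the hunks in the diagnostics code below.

```rust
// Old style (before this kind of cleanup): the literal carried a suffix
// even though the surrounding code already fixes its type, e.g.
//     let mut digits = 0us;
//     while num > 0us { num /= 10us; digits += 1us; }

/// Count the decimal digits of `num` (returns 0 for `num == 0`);
/// every literal's type is inferred as `usize` from the return type,
/// so no suffix is needed.
fn digit_count(mut num: usize) -> usize {
    let mut digits = 0;
    while num > 0 {
        num /= 10;
        digits += 1;
    }
    digits
}

fn main() {
    assert_eq!(digit_count(0), 0);
    assert_eq!(digit_count(12345), 5);
}
```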
@@ -322,21 +322,21 @@ pub fn struct_field_visibility(field: ast::StructField) -> Visibility {
 pub fn operator_prec(op: ast::BinOp_) -> usize {
 match op {
 // 'as' sits here with 12
-BiMul | BiDiv | BiRem => 11us,
+BiMul | BiDiv | BiRem => 11,
-BiAdd | BiSub => 10us,
+BiAdd | BiSub => 10,
-BiShl | BiShr => 9us,
+BiShl | BiShr => 9,
-BiBitAnd => 8us,
+BiBitAnd => 8,
-BiBitXor => 7us,
+BiBitXor => 7,
-BiBitOr => 6us,
+BiBitOr => 6,
-BiLt | BiLe | BiGe | BiGt | BiEq | BiNe => 3us,
+BiLt | BiLe | BiGe | BiGt | BiEq | BiNe => 3,
-BiAnd => 2us,
+BiAnd => 2,
-BiOr => 1us
+BiOr => 1
 }
 }

 /// Precedence of the `as` operator, which is a binary operator
 /// not appearing in the prior table.
-pub const AS_PREC: usize = 12us;
+pub const AS_PREC: usize = 12;

 pub fn empty_generics() -> Generics {
 Generics {
@@ -431,7 +431,7 @@ impl CodeMap {
 let lo = self.lookup_char_pos(sp.lo);
 let hi = self.lookup_char_pos(sp.hi);
 let mut lines = Vec::new();
-for i in lo.line - 1us..hi.line as usize {
+for i in lo.line - 1..hi.line as usize {
 lines.push(i);
 };
 FileLines {file: lo.file, lines: lines}
@@ -499,10 +499,10 @@ impl CodeMap {
 let files = self.files.borrow();
 let files = &*files;
 let len = files.len();
-let mut a = 0us;
+let mut a = 0;
 let mut b = len;
-while b - a > 1us {
+while b - a > 1 {
-let m = (a + b) / 2us;
+let m = (a + b) / 2;
 if files[m].start_pos > pos {
 b = m;
 } else {
@@ -538,12 +538,12 @@ impl CodeMap {

 let files = self.files.borrow();
 let f = (*files)[idx].clone();
-let mut a = 0us;
+let mut a = 0;
 {
 let lines = f.lines.borrow();
 let mut b = lines.len();
-while b - a > 1us {
+while b - a > 1 {
-let m = (a + b) / 2us;
+let m = (a + b) / 2;
 if (*lines)[m] > pos { b = m; } else { a = m; }
 }
 }
@@ -552,7 +552,7 @@ impl CodeMap {

 fn lookup_pos(&self, pos: BytePos) -> Loc {
 let FileMapAndLine {fm: f, line: a} = self.lookup_line(pos);
-let line = a + 1us; // Line numbers start at 1
+let line = a + 1; // Line numbers start at 1
 let chpos = self.bytepos_to_file_charpos(pos);
 let linebpos = (*f.lines.borrow())[a];
 let linechpos = self.bytepos_to_file_charpos(linebpos);
@@ -763,7 +763,7 @@ mod test {

 assert_eq!(file_lines.file.name, "blork.rs");
 assert_eq!(file_lines.lines.len(), 1);
-assert_eq!(file_lines.lines[0], 1us);
+assert_eq!(file_lines.lines[0], 1);
 }

 #[test]
@@ -25,7 +25,7 @@ use term::WriterWrapper;
 use term;

 /// maximum number of lines we will print for each error; arbitrary.
-static MAX_LINES: usize = 6us;
+static MAX_LINES: usize = 6;

 #[derive(Clone, Copy)]
 pub enum RenderSpan {
@@ -155,19 +155,19 @@ impl Handler {
 self.bump_err_count();
 }
 pub fn bump_err_count(&self) {
-self.err_count.set(self.err_count.get() + 1us);
+self.err_count.set(self.err_count.get() + 1);
 }
 pub fn err_count(&self) -> usize {
 self.err_count.get()
 }
 pub fn has_errors(&self) -> bool {
-self.err_count.get() > 0us
+self.err_count.get() > 0
 }
 pub fn abort_if_errors(&self) {
 let s;
 match self.err_count.get() {
-0us => return,
+0 => return,
-1us => s = "aborting due to previous error".to_string(),
+1 => s = "aborting due to previous error".to_string(),
 _ => {
 s = format!("aborting due to {} previous errors",
 self.err_count.get());
@@ -457,7 +457,7 @@ fn highlight_lines(err: &mut EmitterWriter,
 let mut elided = false;
 let mut display_lines = &lines.lines[];
 if display_lines.len() > MAX_LINES {
-display_lines = &display_lines[0us..MAX_LINES];
+display_lines = &display_lines[0..MAX_LINES];
 elided = true;
 }
 // Print the offending lines
@@ -468,32 +468,32 @@ fn highlight_lines(err: &mut EmitterWriter,
 }
 }
 if elided {
-let last_line = display_lines[display_lines.len() - 1us];
+let last_line = display_lines[display_lines.len() - 1];
-let s = format!("{}:{} ", fm.name, last_line + 1us);
+let s = format!("{}:{} ", fm.name, last_line + 1);
 try!(write!(&mut err.dst, "{0:1$}...\n", "", s.len()));
 }

 // FIXME (#3260)
 // If there's one line at fault we can easily point to the problem
-if lines.lines.len() == 1us {
+if lines.lines.len() == 1 {
 let lo = cm.lookup_char_pos(sp.lo);
-let mut digits = 0us;
+let mut digits = 0;
-let mut num = (lines.lines[0] + 1us) / 10us;
+let mut num = (lines.lines[0] + 1) / 10;

 // how many digits must be indent past?
-while num > 0us { num /= 10us; digits += 1us; }
+while num > 0 { num /= 10; digits += 1; }

 // indent past |name:## | and the 0-offset column location
-let left = fm.name.len() + digits + lo.col.to_usize() + 3us;
+let left = fm.name.len() + digits + lo.col.to_usize() + 3;
 let mut s = String::new();
 // Skip is the number of characters we need to skip because they are
 // part of the 'filename:line ' part of the previous line.
-let skip = fm.name.len() + digits + 3us;
+let skip = fm.name.len() + digits + 3;
 for _ in 0..skip {
 s.push(' ');
 }
 if let Some(orig) = fm.get_line(lines.lines[0]) {
-for pos in 0us..left - skip {
+for pos in 0..left - skip {
 let cur_char = orig.as_bytes()[pos] as char;
 // Whenever a tab occurs on the previous line, we insert one on
 // the error-point-squiggly-line as well (instead of a space).
@@ -511,7 +511,7 @@ fn highlight_lines(err: &mut EmitterWriter,
 let hi = cm.lookup_char_pos(sp.hi);
 if hi.col != lo.col {
 // the ^ already takes up one space
-let num_squigglies = hi.col.to_usize() - lo.col.to_usize() - 1us;
+let num_squigglies = hi.col.to_usize() - lo.col.to_usize() - 1;
 for _ in 0..num_squigglies {
 s.push('~');
 }
@@ -770,7 +770,7 @@ impl<'a> MethodDef<'a> {
 let mut raw_fields = Vec::new(); // ~[[fields of self],
 // [fields of next Self arg], [etc]]
 let mut patterns = Vec::new();
-for i in 0us..self_args.len() {
+for i in 0..self_args.len() {
 let struct_path= cx.path(DUMMY_SP, vec!( type_ident ));
 let (pat, ident_expr) =
 trait_.create_struct_pattern(cx,
@@ -859,8 +859,8 @@ impl<'a> MethodDef<'a> {
 /// (&A2(ref __self_0),
 /// &A2(ref __arg_1_0)) => (*__self_0).eq(&(*__arg_1_0)),
 /// _ => {
-/// let __self_vi = match *self { A1(..) => 0us, A2(..) => 1us };
+/// let __self_vi = match *self { A1(..) => 0, A2(..) => 1 };
-/// let __arg_1_vi = match *__arg_1 { A1(..) => 0us, A2(..) => 1us };
+/// let __arg_1_vi = match *__arg_1 { A1(..) => 0, A2(..) => 1 };
 /// false
 /// }
 /// }
@@ -904,8 +904,8 @@ impl<'a> MethodDef<'a> {
 /// (Variant2, Variant2, Variant2) => ... // delegate Matching on Variant2
 /// ...
 /// _ => {
-/// let __this_vi = match this { Variant1 => 0us, Variant2 => 1us, ... };
+/// let __this_vi = match this { Variant1 => 0, Variant2 => 1, ... };
-/// let __that_vi = match that { Variant1 => 0us, Variant2 => 1us, ... };
+/// let __that_vi = match that { Variant1 => 0, Variant2 => 1, ... };
 /// ... // catch-all remainder can inspect above variant index values.
 /// }
 /// }
@@ -1067,13 +1067,13 @@ impl<'a> MethodDef<'a> {
 //
 // ```
 // let __self0_vi = match self {
-// A => 0us, B(..) => 1us, C(..) => 2us
+// A => 0, B(..) => 1, C(..) => 2
 // };
 // let __self1_vi = match __arg1 {
-// A => 0us, B(..) => 1us, C(..) => 2us
+// A => 0, B(..) => 1, C(..) => 2
 // };
 // let __self2_vi = match __arg2 {
-// A => 0us, B(..) => 1us, C(..) => 2us
+// A => 0, B(..) => 1, C(..) => 2
 // };
 // ```
 let mut index_let_stmts: Vec<P<ast::Stmt>> = Vec::new();
@@ -362,7 +362,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
 // in this file.
 // Token-tree macros:
 MacInvocTT(pth, tts, _) => {
-if pth.segments.len() > 1us {
+if pth.segments.len() > 1 {
 fld.cx.span_err(pth.span,
 "expected macro name without module \
 separators");
@@ -931,7 +931,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
 },
 _ => unreachable!()
 };
-if pth.segments.len() > 1us {
+if pth.segments.len() > 1 {
 fld.cx.span_err(pth.span, "expected macro name without module separators");
 return DummyResult::raw_pat(span);
 }
@@ -709,7 +709,7 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 // try removing it when enough of them are gone.

 let mut p = cx.new_parser_from_tts(tts);
-p.quote_depth += 1us;
+p.quote_depth += 1;

 let cx_expr = p.parse_expr();
 if !p.eat(&token::Comma) {
@@ -171,11 +171,11 @@ pub fn initial_matcher_pos(ms: Rc<Vec<TokenTree>>, sep: Option<Token>, lo: ByteP
 stack: vec![],
 top_elts: TtSeq(ms),
 sep: sep,
-idx: 0us,
+idx: 0,
 up: None,
 matches: matches,
-match_lo: 0us,
+match_lo: 0,
-match_cur: 0us,
+match_cur: 0,
 match_hi: match_idx_hi,
 sp_lo: lo
 }
@@ -238,7 +238,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
 }
 }
 let mut ret_val = HashMap::new();
-let mut idx = 0us;
+let mut idx = 0;
 for m in ms.iter() { n_rec(p_s, m, res, &mut ret_val, &mut idx) }
 ret_val
 }
@@ -383,7 +383,7 @@ pub fn parse(sess: &ParseSess,
 if seq.op == ast::ZeroOrMore {
 let mut new_ei = ei.clone();
 new_ei.match_cur += seq.num_captures;
-new_ei.idx += 1us;
+new_ei.idx += 1;
 //we specifically matched zero repeats.
 for idx in ei.match_cur..ei.match_cur + seq.num_captures {
 (&mut new_ei.matches[idx]).push(Rc::new(MatchedSeq(vec![], sp)));
@@ -398,7 +398,7 @@ pub fn parse(sess: &ParseSess,
 cur_eis.push(box MatcherPos {
 stack: vec![],
 sep: seq.separator.clone(),
-idx: 0us,
+idx: 0,
 matches: matches,
 match_lo: ei_t.match_cur,
 match_cur: ei_t.match_cur,
@@ -442,20 +442,20 @@ pub fn parse(sess: &ParseSess,

 /* error messages here could be improved with links to orig. rules */
 if token_name_eq(&tok, &token::Eof) {
-if eof_eis.len() == 1us {
+if eof_eis.len() == 1 {
 let mut v = Vec::new();
 for dv in (&mut eof_eis[0]).matches.iter_mut() {
 v.push(dv.pop().unwrap());
 }
 return Success(nameize(sess, ms, &v[]));
-} else if eof_eis.len() > 1us {
+} else if eof_eis.len() > 1 {
 return Error(sp, "ambiguity: multiple successful parses".to_string());
 } else {
 return Failure(sp, "unexpected end of macro invocation".to_string());
 }
 } else {
-if (bb_eis.len() > 0us && next_eis.len() > 0us)
+if (bb_eis.len() > 0 && next_eis.len() > 0)
-|| bb_eis.len() > 1us {
+|| bb_eis.len() > 1 {
 let nts = bb_eis.iter().map(|ei| {
 match ei.top_elts.get_tt(ei.idx) {
 TtToken(_, MatchNt(bind, name, _, _)) => {
@@ -469,12 +469,12 @@ pub fn parse(sess: &ParseSess,
 "local ambiguity: multiple parsing options: \
 built-in NTs {} or {} other options.",
 nts, next_eis.len()).to_string());
-} else if bb_eis.len() == 0us && next_eis.len() == 0us {
+} else if bb_eis.len() == 0 && next_eis.len() == 0 {
 return Failure(sp, format!("no rules expected the token `{}`",
 pprust::token_to_string(&tok)).to_string());
-} else if next_eis.len() > 0us {
+} else if next_eis.len() > 0 {
 /* Now process the next token */
-while next_eis.len() > 0us {
+while next_eis.len() > 0 {
 cur_eis.push(next_eis.pop().unwrap());
 }
 rdr.next_token();
@@ -488,7 +488,7 @@ pub fn parse(sess: &ParseSess,
 let match_cur = ei.match_cur;
 (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal(
 parse_nt(&mut rust_parser, span, name_string.get()))));
-ei.idx += 1us;
+ei.idx += 1;
 ei.match_cur += 1;
 }
 _ => panic!()
@@ -501,16 +501,16 @@ pub fn parse(sess: &ParseSess,
 }
 }

-assert!(cur_eis.len() > 0us);
+assert!(cur_eis.len() > 0);
 }
 }

 pub fn parse_nt(p: &mut Parser, sp: Span, name: &str) -> Nonterminal {
 match name {
 "tt" => {
-p.quote_depth += 1us; //but in theory, non-quoted tts might be useful
+p.quote_depth += 1; //but in theory, non-quoted tts might be useful
 let res = token::NtTT(P(p.parse_token_tree()));
-p.quote_depth -= 1us;
+p.quote_depth -= 1;
 return res;
 }
 _ => {}
@@ -223,7 +223,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
 r.repeat_len.pop();
 }
 } else { /* repeat */
-*r.repeat_idx.last_mut().unwrap() += 1us;
+*r.repeat_idx.last_mut().unwrap() += 1;
 r.stack.last_mut().unwrap().idx = 0;
 match r.stack.last().unwrap().sep.clone() {
 Some(tk) => {
@@ -62,7 +62,7 @@ pub fn doc_comment_style(comment: &str) -> ast::AttrStyle {
 pub fn strip_doc_comment_decoration(comment: &str) -> String {
 /// remove whitespace-only lines from the start/end of lines
 fn vertical_trim(lines: Vec<String> ) -> Vec<String> {
-let mut i = 0us;
+let mut i = 0;
 let mut j = lines.len();
 // first line of all-stars should be omitted
 if lines.len() > 0 &&
@@ -158,7 +158,7 @@ fn push_blank_line_comment(rdr: &StringReader, comments: &mut Vec<Comment>) {
 fn consume_whitespace_counting_blank_lines(rdr: &mut StringReader,
 comments: &mut Vec<Comment>) {
 while is_whitespace(rdr.curr) && !rdr.is_eof() {
-if rdr.col == CharPos(0us) && rdr.curr_is('\n') {
+if rdr.col == CharPos(0) && rdr.curr_is('\n') {
 push_blank_line_comment(rdr, &mut *comments);
 }
 rdr.bump();
@@ -305,7 +305,7 @@ fn read_block_comment(rdr: &mut StringReader,

 let mut style = if code_to_the_left { Trailing } else { Isolated };
 rdr.consume_non_eol_whitespace();
-if !rdr.is_eof() && !rdr.curr_is('\n') && lines.len() == 1us {
+if !rdr.is_eof() && !rdr.curr_is('\n') && lines.len() == 1 {
 style = Mixed;
 }
 debug!("<<< block comment");
@@ -279,7 +279,7 @@ impl<'a> StringReader<'a> {
 /// Converts CRLF to LF in the given string, raising an error on bare CR.
 fn translate_crlf<'b>(&self, start: BytePos,
 s: &'b str, errmsg: &'b str) -> CowString<'b> {
-let mut i = 0us;
+let mut i = 0;
 while i < s.len() {
 let str::CharRange { ch, next } = s.char_range_at(i);
 if ch == '\r' {
@@ -331,10 +331,10 @@ impl<'a> StringReader<'a> {
 let byte_offset_diff = next.next - current_byte_offset;
 self.pos = self.pos + Pos::from_usize(byte_offset_diff);
 self.curr = Some(next.ch);
-self.col = self.col + CharPos(1us);
+self.col = self.col + CharPos(1);
 if last_char == '\n' {
 self.filemap.next_line(self.last_pos);
-self.col = CharPos(0us);
+self.col = CharPos(0);
 }

 if byte_offset_diff > 1 {
@@ -472,7 +472,7 @@ impl<'a> StringReader<'a> {
 cmap.files.borrow_mut().push(self.filemap.clone());
 let loc = cmap.lookup_char_pos_adj(self.last_pos);
 debug!("Skipping a shebang");
-if loc.line == 1us && loc.col == CharPos(0us) {
+if loc.line == 1 && loc.col == CharPos(0) {
 // FIXME: Add shebang "token", return it
 let start = self.last_pos;
 while !self.curr_is('\n') && !self.is_eof() { self.bump(); }
@@ -646,7 +646,7 @@ impl<'a> StringReader<'a> {
 /// Scan through any digits (base `radix`) or underscores, and return how
 /// many digits there were.
 fn scan_digits(&mut self, radix: usize) -> usize {
-let mut len = 0us;
+let mut len = 0;
 loop {
 let c = self.curr;
 if c == Some('_') { debug!("skipping a _"); self.bump(); continue; }
@@ -799,14 +799,14 @@ impl<'a> StringReader<'a> {
 if self.curr == Some('{') {
 self.scan_unicode_escape(delim)
 } else {
-let res = self.scan_hex_digits(4us, delim, false);
+let res = self.scan_hex_digits(4, delim, false);
 let sp = codemap::mk_sp(escaped_pos, self.last_pos);
 self.old_escape_warning(sp);
 res
 }
 }
 'U' if !ascii_only => {
-let res = self.scan_hex_digits(8us, delim, false);
+let res = self.scan_hex_digits(8, delim, false);
 let sp = codemap::mk_sp(escaped_pos, self.last_pos);
 self.old_escape_warning(sp);
 res
@@ -877,7 +877,7 @@ impl<'a> StringReader<'a> {
 fn scan_unicode_escape(&mut self, delim: char) -> bool {
 self.bump(); // past the {
 let start_bpos = self.last_pos;
-let mut count = 0us;
+let mut count = 0;
 let mut accum_int = 0;

 while !self.curr_is('}') && count <= 6 {
@@ -937,10 +937,10 @@ impl<'a> StringReader<'a> {
 /// error if it isn't.
 fn check_float_base(&mut self, start_bpos: BytePos, last_bpos: BytePos, base: usize) {
 match base {
-16us => self.err_span_(start_bpos, last_bpos, "hexadecimal float literal is not \
+16 => self.err_span_(start_bpos, last_bpos, "hexadecimal float literal is not \
 supported"),
-8us => self.err_span_(start_bpos, last_bpos, "octal float literal is not supported"),
+8 => self.err_span_(start_bpos, last_bpos, "octal float literal is not supported"),
-2us => self.err_span_(start_bpos, last_bpos, "binary float literal is not supported"),
+2 => self.err_span_(start_bpos, last_bpos, "binary float literal is not supported"),
 _ => ()
 }
 }
@@ -1189,7 +1189,7 @@ impl<'a> StringReader<'a> {
 'r' => {
 let start_bpos = self.last_pos;
 self.bump();
-let mut hash_count = 0us;
+let mut hash_count = 0;
 while self.curr_is('#') {
 self.bump();
 hash_count += 1;
@@ -1374,7 +1374,7 @@ impl<'a> StringReader<'a> {
 fn scan_raw_byte_string(&mut self) -> token::Lit {
 let start_bpos = self.last_pos;
 self.bump();
-let mut hash_count = 0us;
+let mut hash_count = 0;
 while self.curr_is('#') {
 self.bump();
 hash_count += 1;
@@ -181,7 +181,7 @@ pub fn parse_tts_from_source_str(name: String,
 name,
 source
 );
-p.quote_depth += 1us;
+p.quote_depth += 1;
 // right now this is re-creating the token trees from ... token trees.
 maybe_aborted(p.parse_all_token_trees(),p)
 }
@@ -324,7 +324,7 @@ pub mod with_hygiene {
 name,
 source
 );
-p.quote_depth += 1us;
+p.quote_depth += 1;
 // right now this is re-creating the token trees from ... token trees.
 maybe_aborted(p.parse_all_token_trees(),p)
 }
@@ -683,9 +683,9 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) ->
 match suffix {
 Some(suf) if looks_like_width_suffix(&['f'], suf) => {
 match base {
-16us => sd.span_err(sp, "hexadecimal float literal is not supported"),
+16 => sd.span_err(sp, "hexadecimal float literal is not supported"),
-8us => sd.span_err(sp, "octal float literal is not supported"),
+8 => sd.span_err(sp, "octal float literal is not supported"),
-2us => sd.span_err(sp, "binary float literal is not supported"),
+2 => sd.span_err(sp, "binary float literal is not supported"),
 _ => ()
 }
 let ident = token::intern_and_get_ident(&*s);
@@ -740,7 +740,7 @@ impl<'a> Parser<'a> {
 // would encounter a `>` and stop. This lets the parser handle trailing
 // commas in generic parameters, because it can stop either after
 // parsing a type or after parsing a comma.
-for i in iter::count(0us, 1) {
+for i in iter::count(0, 1) {
 if self.check(&token::Gt)
 || self.token == token::BinOp(token::Shr)
 || self.token == token::Ge
@@ -917,7 +917,7 @@ impl<'a> Parser<'a> {
 };
 self.span = next.sp;
 self.token = next.tok;
-self.tokens_consumed += 1us;
+self.tokens_consumed += 1;
 self.expected_tokens.clear();
 // check after each token
 self.check_unknown_macro_variable();
@@ -2625,7 +2625,7 @@ impl<'a> Parser<'a> {
 }

 pub fn check_unknown_macro_variable(&mut self) {
-if self.quote_depth == 0us {
+if self.quote_depth == 0 {
 match self.token {
 token::SubstNt(name, _) =>
 self.fatal(&format!("unknown macro variable `{}`",
@@ -2694,7 +2694,7 @@ impl<'a> Parser<'a> {
 token_str)[])
 },
 /* we ought to allow different depths of unquotation */
-token::Dollar | token::SubstNt(..) if p.quote_depth > 0us => {
+token::Dollar | token::SubstNt(..) if p.quote_depth > 0 => {
 p.parse_unquoted()
 }
 _ => {
@@ -5633,7 +5633,7 @@ impl<'a> Parser<'a> {
 return Ok(item);
 }
 if self.token.is_keyword(keywords::Unsafe) &&
-self.look_ahead(1us, |t| t.is_keyword(keywords::Trait))
+self.look_ahead(1, |t| t.is_keyword(keywords::Trait))
 {
 // UNSAFE TRAIT ITEM
 self.expect_keyword(keywords::Unsafe);
@@ -5650,7 +5650,7 @@ impl<'a> Parser<'a> {
 return Ok(item);
 }
 if self.token.is_keyword(keywords::Unsafe) &&
-self.look_ahead(1us, |t| t.is_keyword(keywords::Impl))
+self.look_ahead(1, |t| t.is_keyword(keywords::Impl))
 {
 // IMPL ITEM
 self.expect_keyword(keywords::Unsafe);
@@ -5680,7 +5680,7 @@ impl<'a> Parser<'a> {
 return Ok(item);
 }
 if self.token.is_keyword(keywords::Unsafe)
-&& self.look_ahead(1us, |t| *t != token::OpenDelim(token::Brace)) {
+&& self.look_ahead(1, |t| *t != token::OpenDelim(token::Brace)) {
 // UNSAFE FUNCTION ITEM
 self.bump();
 let abi = if self.eat_keyword(keywords::Extern) {
@@ -5958,7 +5958,7 @@ impl<'a> Parser<'a> {
 }
 }
 }
-let mut rename_to = path[path.len() - 1us];
+let mut rename_to = path[path.len() - 1];
 let path = ast::Path {
 span: mk_sp(lo, self.last_span.hi),
 global: false,
@@ -132,15 +132,15 @@ pub fn buf_str(toks: &[Token],
 let mut i = left;
 let mut l = lim;
 let mut s = string::String::from_str("[");
-while i != right && l != 0us {
+while i != right && l != 0 {
-l -= 1us;
+l -= 1;
 if i != left {
 s.push_str(", ");
 }
 s.push_str(&format!("{}={}",
 szs[i],
 tok_str(&toks[i]))[]);
-i += 1us;
+i += 1;
 i %= n;
 }
 s.push(']');
@@ -326,8 +326,8 @@ impl Printer {
 if self.scan_stack_empty {
 self.left_total = 1;
 self.right_total = 1;
-self.left = 0us;
+self.left = 0;
-self.right = 0us;
+self.right = 0;
 } else { self.advance_right(); }
 debug!("pp Begin({})/buffer ~[{},{}]",
 b.offset, self.left, self.right);
@@ -355,8 +355,8 @@ impl Printer {
 if self.scan_stack_empty {
 self.left_total = 1;
 self.right_total = 1;
-self.left = 0us;
+self.left = 0;
-self.right = 0us;
+self.right = 0;
 } else { self.advance_right(); }
 debug!("pp Break({})/buffer ~[{},{}]",
 b.offset, self.left, self.right);
@@ -410,7 +410,7 @@ impl Printer {
 if self.scan_stack_empty {
 self.scan_stack_empty = false;
 } else {
-self.top += 1us;
+self.top += 1;
 self.top %= self.buf_len;
 assert!((self.top != self.bottom));
 }
@@ -422,7 +422,7 @@ impl Printer {
 if self.top == self.bottom {
 self.scan_stack_empty = true;
 } else {
-self.top += self.buf_len - 1us; self.top %= self.buf_len;
+self.top += self.buf_len - 1; self.top %= self.buf_len;
 }
 return x;
 }
@@ -436,12 +436,12 @@ impl Printer {
 if self.top == self.bottom {
 self.scan_stack_empty = true;
 } else {
-self.bottom += 1us; self.bottom %= self.buf_len;
+self.bottom += 1; self.bottom %= self.buf_len;
 }
 return x;
 }
 pub fn advance_right(&mut self) {
-self.right += 1us;
+self.right += 1;
 self.right %= self.buf_len;
 assert!((self.right != self.left));
 }
@@ -471,7 +471,7 @@ impl Printer {
 break;
 }

-self.left += 1us;
+self.left += 1;
 self.left %= self.buf_len;

 left_size = self.size[self.left];
@@ -520,7 +520,7 @@ impl Printer {
 pub fn get_top(&mut self) -> PrintStackElem {
 let print_stack = &mut self.print_stack;
 let n = print_stack.len();
-if n != 0us {
+if n != 0 {
 (*print_stack)[n - 1]
 } else {
 PrintStackElem {
@@ -565,7 +565,7 @@ impl Printer {
 Token::End => {
 debug!("print End -> pop End");
 let print_stack = &mut self.print_stack;
-assert!((print_stack.len() != 0us));
+assert!((print_stack.len() != 0));
 print_stack.pop().unwrap();
 Ok(())
 }
@@ -667,11 +667,11 @@ pub fn spaces(p: &mut Printer, n: usize) -> old_io::IoResult<()> {
 }

 pub fn zerobreak(p: &mut Printer) -> old_io::IoResult<()> {
-spaces(p, 0us)
+spaces(p, 0)
 }

 pub fn space(p: &mut Printer) -> old_io::IoResult<()> {
-spaces(p, 1us)
+spaces(p, 1)
 }

 pub fn hardbreak(p: &mut Printer) -> old_io::IoResult<()> {
@@ -92,10 +92,10 @@ pub fn rust_printer_annotated<'a>(writer: Box<old_io::Writer+'static>,
 }

 #[allow(non_upper_case_globals)]
-pub const indent_unit: usize = 4us;
+pub const indent_unit: usize = 4;

 #[allow(non_upper_case_globals)]
-pub const default_columns: usize = 78us;
+pub const default_columns: usize = 78;

 /// Requires you to pass an input filename and reader so that
 /// it can scan the input text for comments and literals to
@@ -377,7 +377,7 @@ pub fn block_to_string(blk: &ast::Block) -> String {
 // containing cbox, will be closed by print-block at }
 try!(s.cbox(indent_unit));
 // head-ibox, will be closed by print-block after {
-try!(s.ibox(0us));
+try!(s.ibox(0));
 s.print_block(blk)
 })
 }
@@ -516,7 +516,7 @@ impl<'a> State<'a> {
 pub fn bclose_maybe_open (&mut self, span: codemap::Span,
 indented: usize, close_box: bool) -> IoResult<()> {
 try!(self.maybe_print_comment(span.hi));
-try!(self.break_offset_if_not_bol(1us, -(indented as isize)));
+try!(self.break_offset_if_not_bol(1, -(indented as isize)));
 try!(word(&mut self.s, "}"));
 if close_box {
 try!(self.end()); // close the outer-box
@@ -591,7 +591,7 @@ impl<'a> State<'a> {
 pub fn commasep<T, F>(&mut self, b: Breaks, elts: &[T], mut op: F) -> IoResult<()> where
 F: FnMut(&mut State, &T) -> IoResult<()>,
 {
-try!(self.rbox(0us, b));
+try!(self.rbox(0, b));
 let mut first = true;
 for elt in elts.iter() {
 if first { first = false; } else { try!(self.word_space(",")); }
@@ -609,13 +609,13 @@ impl<'a> State<'a> {
 F: FnMut(&mut State, &T) -> IoResult<()>,
 G: FnMut(&T) -> codemap::Span,
 {
-try!(self.rbox(0us, b));
+try!(self.rbox(0, b));
 let len = elts.len();
-let mut i = 0us;
+let mut i = 0;
 for elt in elts.iter() {
 try!(self.maybe_print_comment(get_span(elt).hi));
 try!(op(self, elt));
-i += 1us;
+i += 1;
 if i < len {
 try!(word(&mut self.s, ","));
 try!(self.maybe_print_trailing_comment(get_span(elt),
@@ -660,7 +660,7 @@ impl<'a> State<'a> {

 pub fn print_type(&mut self, ty: &ast::Ty) -> IoResult<()> {
 try!(self.maybe_print_comment(ty.span.lo));
-try!(self.ibox(0us));
+try!(self.ibox(0));
 match ty.node {
 ast::TyVec(ref ty) => {
 try!(word(&mut self.s, "["));
@@ -880,7 +880,7 @@ impl<'a> State<'a> {
 }
 ast::ItemTy(ref ty, ref params) => {
 try!(self.ibox(indent_unit));
-try!(self.ibox(0us));
+try!(self.ibox(0));
 try!(self.word_nbsp(&visibility_qualified(item.vis, "type")[]));
 try!(self.print_ident(item.ident));
 try!(self.print_generics(params));
@@ -1277,7 +1277,7 @@ impl<'a> State<'a> {

 pub fn print_outer_attributes(&mut self,
 attrs: &[ast::Attribute]) -> IoResult<()> {
-let mut count = 0us;
+let mut count = 0;
 for attr in attrs.iter() {
 match attr.node.style {
 ast::AttrOuter => {
@@ -1295,7 +1295,7 @@ impl<'a> State<'a> {

 pub fn print_inner_attributes(&mut self,
 attrs: &[ast::Attribute]) -> IoResult<()> {
-let mut count = 0us;
+let mut count = 0;
 for attr in attrs.iter() {
 match attr.node.style {
 ast::AttrInner => {
@@ -1416,8 +1416,8 @@ impl<'a> State<'a> {
 match _else.node {
 // "another else-if"
 ast::ExprIf(ref i, ref then, ref e) => {
-try!(self.cbox(indent_unit - 1us));
+try!(self.cbox(indent_unit - 1));
-try!(self.ibox(0us));
+try!(self.ibox(0));
 try!(word(&mut self.s, " else if "));
 try!(self.print_expr(&**i));
 try!(space(&mut self.s));
@@ -1426,8 +1426,8 @@ impl<'a> State<'a> {
 }
 // "another else-if-let"
 ast::ExprIfLet(ref pat, ref expr, ref then, ref e) => {
-try!(self.cbox(indent_unit - 1us));
+try!(self.cbox(indent_unit - 1));
-try!(self.ibox(0us));
+try!(self.ibox(0));
 try!(word(&mut self.s, " else if let "));
 try!(self.print_pat(&**pat));
 try!(space(&mut self.s));
@@ -1439,8 +1439,8 @@ impl<'a> State<'a> {
 }
 // "final else"
 ast::ExprBlock(ref b) => {
-try!(self.cbox(indent_unit - 1us));
+try!(self.cbox(indent_unit - 1));
-try!(self.ibox(0us));
+try!(self.ibox(0));
 try!(word(&mut self.s, " else "));
 self.print_block(&**b)
 }
@@ -1606,7 +1606,7 @@ impl<'a> State<'a> {
 try!(self.print_expr(&*args[0]));
 try!(word(&mut self.s, "."));
 try!(self.print_ident(ident.node));
-if tys.len() > 0us {
+if tys.len() > 0 {
 try!(word(&mut self.s, "::<"));
 try!(self.commasep(Inconsistent, tys,
 |s, ty| s.print_type(&**ty)));
@@ -1777,7 +1777,7 @@ impl<'a> State<'a> {
 // containing cbox, will be closed by print-block at }
 try!(self.cbox(indent_unit));
 // head-box, will be closed by print-block after {
-try!(self.ibox(0us));
+try!(self.ibox(0));
 try!(self.print_block(&**blk));
 }
 ast::ExprAssign(ref lhs, ref rhs) => {
@@ -2154,7 +2154,7 @@ impl<'a> State<'a> {
 },
 |f| f.node.pat.span));
 if etc {
-if fields.len() != 0us { try!(self.word_space(",")); }
+if fields.len() != 0 { try!(self.word_space(",")); }
 try!(word(&mut self.s, ".."));
 }
 try!(space(&mut self.s));
@@ -2221,7 +2221,7 @@ impl<'a> State<'a> {
 try!(space(&mut self.s));
 }
 try!(self.cbox(indent_unit));
-try!(self.ibox(0us));
+try!(self.ibox(0));
 try!(self.print_outer_attributes(&arm.attrs[]));
 let mut first = true;
 for p in arm.pats.iter() {
@@ -2307,7 +2307,7 @@ impl<'a> State<'a> {
 -> IoResult<()> {
 // It is unfortunate to duplicate the commasep logic, but we want the
 // self type and the args all in the same box.
-try!(self.rbox(0us, Inconsistent));
+try!(self.rbox(0, Inconsistent));
 let mut first = true;
 for &explicit_self in opt_explicit_self.iter() {
 let m = match explicit_self {
@@ -2457,7 +2457,7 @@ impl<'a> State<'a> {
 try!(word(&mut self.s, "<"));

 let mut ints = Vec::new();
-for i in 0us..total {
+for i in 0..total {
 ints.push(i);
 }

@@ -2707,7 +2707,7 @@ impl<'a> State<'a> {
 if span.hi < (*cmnt).pos && (*cmnt).pos < next &&
 span_line.line == comment_line.line {
 try!(self.print_comment(cmnt));
-self.cur_cmnt_and_lit.cur_cmnt += 1us;
+self.cur_cmnt_and_lit.cur_cmnt += 1;
 }
 }
 _ => ()
@@ -2725,7 +2725,7 @@ impl<'a> State<'a> {
 match self.next_comment() {
 Some(ref cmnt) => {
 try!(self.print_comment(cmnt));
-self.cur_cmnt_and_lit.cur_cmnt += 1us;
+self.cur_cmnt_and_lit.cur_cmnt += 1;
 }
 _ => break
 }
@@ -2807,7 +2807,7 @@ impl<'a> State<'a> {
 while self.cur_cmnt_and_lit.cur_lit < lits.len() {
 let ltrl = (*lits)[self.cur_cmnt_and_lit.cur_lit].clone();
 if ltrl.pos > pos { return None; }
-self.cur_cmnt_and_lit.cur_lit += 1us;
+self.cur_cmnt_and_lit.cur_lit += 1;
 if ltrl.pos == pos { return Some(ltrl); }
 }
 None
@@ -2822,7 +2822,7 @@ impl<'a> State<'a> {
 Some(ref cmnt) => {
 if (*cmnt).pos < pos {
 try!(self.print_comment(cmnt));
-self.cur_cmnt_and_lit.cur_cmnt += 1us;
+self.cur_cmnt_and_lit.cur_cmnt += 1;
 } else { break; }
 }
 _ => break
@@ -2835,7 +2835,7 @@ impl<'a> State<'a> {
 cmnt: &comments::Comment) -> IoResult<()> {
 match cmnt.style {
 comments::Mixed => {
-assert_eq!(cmnt.lines.len(), 1us);
+assert_eq!(cmnt.lines.len(), 1);
 try!(zerobreak(&mut self.s));
 try!(word(&mut self.s, &cmnt.lines[0][]));
 zerobreak(&mut self.s)
@@ -2854,11 +2854,11 @@ impl<'a> State<'a> {
 }
 comments::Trailing => {
 try!(word(&mut self.s, " "));
-if cmnt.lines.len() == 1us {
+if cmnt.lines.len() == 1 {
 try!(word(&mut self.s, &cmnt.lines[0][]));
 hardbreak(&mut self.s)
 } else {
-try!(self.ibox(0us));
+try!(self.ibox(0));
 for line in cmnt.lines.iter() {
 if !line.is_empty() {
 try!(word(&mut self.s, &line[]));
@@ -356,8 +356,8 @@ fn is_bench_fn(cx: &TestCtxt, i: &ast::Item) -> bool {
 let tparm_cnt = generics.ty_params.len();
 // NB: inadequate check, but we're running
 // well before resolve, can't get too deep.
-input_cnt == 1us
+input_cnt == 1
-&& no_output && tparm_cnt == 0us
+&& no_output && tparm_cnt == 0
 }
 _ => false
 }