
Work towards a non-panicking parser (libsyntax)

- Functions in parser.rs return PResult<> rather than panicking
- Other functions in libsyntax call panic! explicitly for now if they rely on panicking behaviour.
- 'panictry!' macro added as scaffolding while converting panicking functions.
  (This does the same as 'unwrap()' but is easier to grep for and turn into try!(); a sketch of the pattern follows below.)
- Leaves panicking wrappers for the following functions so that the
  quote_* macros behave the same:
  - parse_expr, parse_item, parse_pat, parse_arm, parse_ty, parse_stmt
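The pattern in miniature (an illustrative sketch, not code from this commit: `parse_thing`/`parse_thing_nopanic` are invented names, and the sketch panics with the error's Debug form where the commit's macro uses the pre-2021 `panic!(e)` payload style):

// `FatalError` is the zero-sized token returned by the span handler on a
// fatal diagnostic; `#[must_use]` (added in this commit) warns if a caller
// silently drops one. `Debug` is derived here only so the sketch can print it.
#[derive(Copy, Clone, Debug)]
#[must_use]
pub struct FatalError;

pub type PResult<T> = Result<T, FatalError>;

// Scaffolding: same effect as `.unwrap()`, but easy to grep for and later
// mechanically rewrite into `try!($e)`.
macro_rules! panictry {
    ($e:expr) => ({
        match $e {
            Ok(e) => e,
            Err(e) => panic!("{:?}", e),
        }
    })
}

struct Parser;

impl Parser {
    // New non-panicking form: report failure by returning it.
    fn parse_thing_nopanic(&mut self) -> PResult<u32> {
        Err(FatalError)
    }

    // Old name kept as a panicking wrapper so existing callers
    // (e.g. the quote_* macros) see no behaviour change.
    fn parse_thing(&mut self) -> u32 {
        panictry!(self.parse_thing_nopanic())
    }
}

fn main() {
    let mut p = Parser;
    let _ = p.parse_thing(); // aborts via panic, as before the conversion
}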
Phil Dawes 2015-03-28 21:58:51 +00:00
parent f73f3233f1
commit b2bcb7229a
23 changed files with 1412 additions and 1315 deletions

View file

@@ -528,7 +528,10 @@ impl<'a> CrateReader<'a> {
                                            source_name.clone(),
                                            body);
         let lo = p.span.lo;
-        let body = p.parse_all_token_trees();
+        let body = match p.parse_all_token_trees() {
+            Ok(body) => body,
+            Err(err) => panic!(err),
+        };
         let span = mk_sp(lo, p.last_span.hi);
         p.abort_if_errors();
         macros.push(ast::MacroDef {

View file

@@ -68,13 +68,13 @@ impl Session {
         if self.opts.treat_err_as_bug {
             self.span_bug(sp, msg);
         }
-        self.diagnostic().span_fatal(sp, msg)
+        panic!(self.diagnostic().span_fatal(sp, msg))
     }
     pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> ! {
         if self.opts.treat_err_as_bug {
             self.span_bug(sp, msg);
         }
-        self.diagnostic().span_fatal_with_code(sp, msg, code)
+        panic!(self.diagnostic().span_fatal_with_code(sp, msg, code))
     }
     pub fn fatal(&self, msg: &str) -> ! {
         if self.opts.treat_err_as_bug {

View file

@@ -503,8 +503,8 @@ pub fn require_unique_names(diagnostic: &SpanHandler, metas: &[P<MetaItem>]) {
         let name = meta.name();
         if !set.insert(name.clone()) {
-            diagnostic.span_fatal(meta.span,
-                                  &format!("duplicate meta item `{}`", name));
+            panic!(diagnostic.span_fatal(meta.span,
+                                  &format!("duplicate meta item `{}`", name)));
         }
     }
 }

View file

@@ -72,6 +72,7 @@ pub trait Emitter {
 /// from the diagnostics. You can use this with the `Any` trait to figure out
 /// how a rustc task died (if so desired).
 #[derive(Copy, Clone)]
+#[must_use]
 pub struct FatalError;
 /// Signifies that the compiler died with an explicit call to `.bug`
@@ -88,13 +89,13 @@ pub struct SpanHandler {
 }
 impl SpanHandler {
-    pub fn span_fatal(&self, sp: Span, msg: &str) -> ! {
+    pub fn span_fatal(&self, sp: Span, msg: &str) -> FatalError {
         self.handler.emit(Some((&self.cm, sp)), msg, Fatal);
-        panic!(FatalError);
+        return FatalError;
     }
-    pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> ! {
+    pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> FatalError {
         self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Fatal);
-        panic!(FatalError);
+        return FatalError;
     }
     pub fn span_err(&self, sp: Span, msg: &str) {
         self.handler.emit(Some((&self.cm, sp)), msg, Error);

View file

@@ -23,6 +23,17 @@ use parse::token::InternedString;
 use parse::token;
 use ptr::P;
+macro_rules! panictry {
+    ($e:expr) => ({
+        use std::result::Result::{Ok, Err};
+        match $e {
+            Ok(e) => e,
+            Err(e) => panic!(e),
+        }
+    })
+}
 enum State {
     Asm,
     Outputs,
@@ -91,16 +102,16 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                   p.token != token::ModSep {
                 if outputs.len() != 0 {
-                    p.eat(&token::Comma);
+                    panictry!(p.eat(&token::Comma));
                 }
-                let (constraint, _str_style) = p.parse_str();
+                let (constraint, _str_style) = panictry!(p.parse_str());
                 let span = p.last_span;
-                p.expect(&token::OpenDelim(token::Paren));
+                panictry!(p.expect(&token::OpenDelim(token::Paren)));
                 let out = p.parse_expr();
-                p.expect(&token::CloseDelim(token::Paren));
+                panictry!(p.expect(&token::CloseDelim(token::Paren)));
                 // Expands a read+write operand into two operands.
                 //
@@ -131,10 +142,10 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                   p.token != token::ModSep {
                 if inputs.len() != 0 {
-                    p.eat(&token::Comma);
+                    panictry!(p.eat(&token::Comma));
                 }
-                let (constraint, _str_style) = p.parse_str();
+                let (constraint, _str_style) = panictry!(p.parse_str());
                 if constraint.starts_with("=") {
                     cx.span_err(p.last_span, "input operand constraint contains '='");
@@ -142,9 +153,9 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                     cx.span_err(p.last_span, "input operand constraint contains '+'");
                 }
-                p.expect(&token::OpenDelim(token::Paren));
+                panictry!(p.expect(&token::OpenDelim(token::Paren)));
                 let input = p.parse_expr();
-                p.expect(&token::CloseDelim(token::Paren));
+                panictry!(p.expect(&token::CloseDelim(token::Paren)));
                 inputs.push((constraint, input));
             }
@@ -155,10 +166,10 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                   p.token != token::ModSep {
                 if clobs.len() != 0 {
-                    p.eat(&token::Comma);
+                    panictry!(p.eat(&token::Comma));
                 }
-                let (s, _str_style) = p.parse_str();
+                let (s, _str_style) = panictry!(p.parse_str());
                 if OPTIONS.iter().any(|&opt| s == opt) {
                     cx.span_warn(p.last_span, "expected a clobber, found an option");
@@ -167,7 +178,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                 }
             }
             Options => {
-                let (option, _str_style) = p.parse_str();
+                let (option, _str_style) = panictry!(p.parse_str());
                 if option == "volatile" {
                     // Indicates that the inline assembly has side effects
@@ -182,7 +193,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                 }
                 if p.token == token::Comma {
-                    p.eat(&token::Comma);
+                    panictry!(p.eat(&token::Comma));
                 }
             }
             StateNone => ()
@@ -194,12 +205,12 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
         match (&p.token, state.next(), state.next().next()) {
             (&token::Colon, StateNone, _) |
             (&token::ModSep, _, StateNone) => {
-                p.bump();
+                panictry!(p.bump());
                 break 'statement;
             }
             (&token::Colon, st, _) |
             (&token::ModSep, _, st) => {
-                p.bump();
+                panictry!(p.bump());
                 state = st;
             }
             (&token::Eof, _, _) => break 'statement,

View file

@@ -652,9 +652,9 @@ impl<'a> ExtCtxt<'a> {
     pub fn bt_push(&mut self, ei: ExpnInfo) {
         self.recursion_count += 1;
         if self.recursion_count > self.ecfg.recursion_limit {
-            self.span_fatal(ei.call_site,
-                            &format!("recursion limit reached while expanding the macro `{}`",
-                                     ei.callee.name));
+            panic!(self.span_fatal(ei.call_site,
+                            &format!("recursion limit reached while expanding the macro `{}`",
+                                     ei.callee.name)));
         }
         let mut call_site = ei.call_site;
@@ -699,7 +699,7 @@ impl<'a> ExtCtxt<'a> {
     /// value doesn't have to match anything)
     pub fn span_fatal(&self, sp: Span, msg: &str) -> ! {
         self.print_backtrace();
-        self.parse_sess.span_diagnostic.span_fatal(sp, msg);
+        panic!(self.parse_sess.span_diagnostic.span_fatal(sp, msg));
     }
     /// Emit `msg` attached to `sp`, without immediately stopping
@@ -817,7 +817,7 @@ pub fn get_exprs_from_tts(cx: &mut ExtCtxt,
     let mut es = Vec::new();
     while p.token != token::Eof {
         es.push(cx.expander().fold_expr(p.parse_expr()));
-        if p.eat(&token::Comma) {
+        if panictry!(p.eat(&token::Comma)) {
             continue;
         }
         if p.token != token::Eof {

View file

@@ -29,7 +29,7 @@ pub fn expand_cfg<'cx>(cx: &mut ExtCtxt,
     let mut p = cx.new_parser_from_tts(tts);
     let cfg = p.parse_meta_item();
-    if !p.eat(&token::Eof) {
+    if !panictry!(p.eat(&token::Eof)) {
         cx.span_err(sp, "expected 1 cfg-pattern");
         return DummyResult::expr(sp);
     }

View file

@@ -1684,7 +1684,7 @@ mod test {
     fn expand_crate_str(crate_str: String) -> ast::Crate {
         let ps = parse::new_parse_sess();
-        let crate_ast = string_to_parser(&ps, crate_str).parse_crate_mod();
+        let crate_ast = panictry!(string_to_parser(&ps, crate_str).parse_crate_mod());
         // the cfg argument actually does matter, here...
         expand_crate(&ps,test_ecfg(),vec!(),vec!(),crate_ast)
     }

View file

@@ -24,6 +24,17 @@ use ptr::P;
 use std::collections::HashMap;
 use std::iter::repeat;
+macro_rules! panictry {
+    ($e:expr) => ({
+        use std::result::Result::{Ok, Err};
+        match $e {
+            Ok(e) => e,
+            Err(e) => { panic!(e); }
+        }
+    })
+}
 #[derive(PartialEq)]
 enum ArgumentType {
     Known(String),
@@ -92,7 +103,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
     let fmtstr = p.parse_expr();
     let mut named = false;
     while p.token != token::Eof {
-        if !p.eat(&token::Comma) {
+        if !panictry!(p.eat(&token::Comma)) {
             ecx.span_err(sp, "expected token: `,`");
             return None;
         }
@@ -101,7 +112,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
             named = true;
             let ident = match p.token {
                 token::Ident(i, _) => {
-                    p.bump();
+                    panictry!(p.bump());
                     i
                 }
                 _ if named => {
@@ -120,7 +131,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
             let interned_name = token::get_ident(ident);
             let name = &interned_name[..];
-            p.expect(&token::Eq);
+            panictry!(p.expect(&token::Eq));
             let e = p.parse_expr();
             match names.get(name) {
                 None => {}

View file

@@ -781,11 +781,11 @@ fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[ast::TokenTree])
     p.quote_depth += 1;
     let cx_expr = p.parse_expr();
-    if !p.eat(&token::Comma) {
-        p.fatal("expected token `,`");
+    if !panictry!(p.eat(&token::Comma)) {
+        panic!(p.fatal("expected token `,`"));
     }
-    let tts = p.parse_all_token_trees();
+    let tts = panictry!(p.parse_all_token_trees());
     p.abort_if_errors();
     (cx_expr, tts)

View file

@@ -117,11 +117,11 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree
         while self.p.token != token::Eof {
             match self.p.parse_item() {
                 Some(item) => ret.push(item),
-                None => self.p.span_fatal(
+                None => panic!(self.p.span_fatal(
                     self.p.span,
                     &format!("expected item, found `{}`",
                              self.p.this_token_to_string())
-                )
+                ))
             }
         }
         Some(ret)

View file

@@ -226,10 +226,10 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
             }
             Occupied(..) => {
                 let string = token::get_ident(bind_name);
-                p_s.span_diagnostic
+                panic!(p_s.span_diagnostic
                    .span_fatal(sp,
                                &format!("duplicated bind name: {}",
-                                        &string))
+                                        &string)))
             }
         }
     }
@@ -260,10 +260,10 @@ pub fn parse_or_else(sess: &ParseSess,
     match parse(sess, cfg, rdr, &ms[..]) {
         Success(m) => m,
         Failure(sp, str) => {
-            sess.span_diagnostic.span_fatal(sp, &str[..])
+            panic!(sess.span_diagnostic.span_fatal(sp, &str[..]))
         }
         Error(sp, str) => {
-            sess.span_diagnostic.span_fatal(sp, &str[..])
+            panic!(sess.span_diagnostic.span_fatal(sp, &str[..]))
        }
    }
 }
@@ -512,46 +512,46 @@ pub fn parse_nt(p: &mut Parser, sp: Span, name: &str) -> Nonterminal {
     match name {
         "tt" => {
             p.quote_depth += 1; //but in theory, non-quoted tts might be useful
-            let res = token::NtTT(P(p.parse_token_tree()));
+            let res = token::NtTT(P(panictry!(p.parse_token_tree())));
             p.quote_depth -= 1;
             return res;
         }
         _ => {}
     }
     // check at the beginning and the parser checks after each bump
-    p.check_unknown_macro_variable();
+    panictry!(p.check_unknown_macro_variable());
     match name {
         "item" => match p.parse_item() {
             Some(i) => token::NtItem(i),
-            None => p.fatal("expected an item keyword")
+            None => panic!(p.fatal("expected an item keyword"))
         },
-        "block" => token::NtBlock(p.parse_block()),
+        "block" => token::NtBlock(panictry!(p.parse_block())),
         "stmt" => match p.parse_stmt() {
             Some(s) => token::NtStmt(s),
-            None => p.fatal("expected a statement")
+            None => panic!(p.fatal("expected a statement"))
         },
         "pat" => token::NtPat(p.parse_pat()),
         "expr" => token::NtExpr(p.parse_expr()),
         "ty" => token::NtTy(p.parse_ty()),
         // this could be handled like a token, since it is one
         "ident" => match p.token {
-            token::Ident(sn,b) => { p.bump(); token::NtIdent(box sn,b) }
+            token::Ident(sn,b) => { panictry!(p.bump()); token::NtIdent(box sn,b) }
             _ => {
                 let token_str = pprust::token_to_string(&p.token);
-                p.fatal(&format!("expected ident, found {}",
-                                 &token_str[..]))
+                panic!(p.fatal(&format!("expected ident, found {}",
+                                        &token_str[..])))
            }
        },
        "path" => {
-            token::NtPath(box p.parse_path(LifetimeAndTypesWithoutColons))
+            token::NtPath(box panictry!(p.parse_path(LifetimeAndTypesWithoutColons)))
        }
        "meta" => token::NtMeta(p.parse_meta_item()),
        _ => {
-            p.span_fatal_help(sp,
+            panic!(p.span_fatal_help(sp,
                               &format!("invalid fragment specifier `{}`", name),
                               "valid fragment specifiers are `ident`, `block`, \
                                `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt` \
-                               and `item`")
+                               and `item`"))
        }
    }
 }

View file

@@ -41,7 +41,7 @@ impl<'a> ParserAnyMacro<'a> {
     fn ensure_complete_parse(&self, allow_semi: bool) {
         let mut parser = self.parser.borrow_mut();
         if allow_semi && parser.token == token::Semi {
-            parser.bump()
+            panictry!(parser.bump())
         }
         if parser.token != token::Eof {
             let token_str = parser.this_token_to_string();
@@ -81,7 +81,7 @@ impl<'a> MacResult for ParserAnyMacro<'a> {
             let mut parser = self.parser.borrow_mut();
             match parser.token {
                 token::Eof => break,
-                _ => ret.push(parser.parse_impl_item())
+                _ => ret.push(panictry!(parser.parse_impl_item()))
             }
         }
         self.ensure_complete_parse(false);
@@ -142,7 +142,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
             MatchedNonterminal(NtTT(ref lhs_tt)) => {
                 let lhs_tt = match **lhs_tt {
                     TtDelimited(_, ref delim) => &delim.tts[..],
-                    _ => cx.span_fatal(sp, "malformed macro lhs")
+                    _ => panic!(cx.span_fatal(sp, "malformed macro lhs"))
                 };
                 match TokenTree::parse(cx, lhs_tt, arg) {
@@ -153,7 +153,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                             match **tt {
                                 // ignore delimiters
                                 TtDelimited(_, ref delimed) => delimed.tts.clone(),
-                                _ => cx.span_fatal(sp, "macro rhs must be delimited"),
+                                _ => panic!(cx.span_fatal(sp, "macro rhs must be delimited")),
                             }
                         },
                         _ => cx.span_bug(sp, "bad thing in rhs")
@@ -164,7 +164,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                                            imported_from,
                                            rhs);
                 let mut p = Parser::new(cx.parse_sess(), cx.cfg(), Box::new(trncbr));
-                p.check_unknown_macro_variable();
+                panictry!(p.check_unknown_macro_variable());
                 // Let the context choose how to interpret the result.
                 // Weird, but useful for X-macros.
                 return box ParserAnyMacro {
@@ -175,13 +175,13 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                     best_fail_spot = sp;
                     best_fail_msg = (*msg).clone();
                 },
-                Error(sp, ref msg) => cx.span_fatal(sp, &msg[..])
+                Error(sp, ref msg) => panic!(cx.span_fatal(sp, &msg[..]))
             }
         }
         _ => cx.bug("non-matcher found in parsed lhses")
     }
 }
-    cx.span_fatal(best_fail_spot, &best_fail_msg[..]);
+    panic!(cx.span_fatal(best_fail_spot, &best_fail_msg[..]));
 }
// Note that macro-by-example's input is also matched against a token tree: // Note that macro-by-example's input is also matched against a token tree:

View file

@@ -247,22 +247,22 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                 match lockstep_iter_size(&TtSequence(sp, seq.clone()),
                                          r) {
                     LisUnconstrained => {
-                        r.sp_diag.span_fatal(
+                        panic!(r.sp_diag.span_fatal(
                             sp.clone(), /* blame macro writer */
                             "attempted to repeat an expression \
                              containing no syntax \
-                             variables matched as repeating at this depth");
+                             variables matched as repeating at this depth"));
                     }
                     LisContradiction(ref msg) => {
                         // FIXME #2887 blame macro invoker instead
-                        r.sp_diag.span_fatal(sp.clone(), &msg[..]);
+                        panic!(r.sp_diag.span_fatal(sp.clone(), &msg[..]));
                     }
                     LisConstraint(len, _) => {
                         if len == 0 {
                             if seq.op == ast::OneOrMore {
                                 // FIXME #2887 blame invoker
-                                r.sp_diag.span_fatal(sp.clone(),
-                                    "this must repeat at least once");
+                                panic!(r.sp_diag.span_fatal(sp.clone(),
+                                    "this must repeat at least once"));
                             }
                             r.stack.last_mut().unwrap().idx += 1;
@@ -306,10 +306,10 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
             return ret_val;
         }
         MatchedSeq(..) => {
-            r.sp_diag.span_fatal(
+            panic!(r.sp_diag.span_fatal(
                 r.cur_span, /* blame the macro writer */
                 &format!("variable '{:?}' is still repeating at this depth",
-                         token::get_ident(ident)));
+                         token::get_ident(ident))));
         }
     }
 }

View file

@@ -50,6 +50,17 @@ extern crate libc;
 extern crate serialize as rustc_serialize; // used by deriving
+macro_rules! panictry {
+    ($e:expr) => ({
+        use std::result::Result::{Ok, Err};
+        match $e {
+            Ok(e) => e,
+            Err(e) => panic!(e)
+        }
+    })
+}
 pub mod util {
     pub mod interner;
     #[cfg(test)]

View file

@@ -45,10 +45,10 @@ impl<'a> ParserAttr for Parser<'a> {
                     self.span.hi
                 );
                 if attr.node.style != ast::AttrOuter {
-                    self.fatal("expected outer comment");
+                    panic!(self.fatal("expected outer comment"));
                 }
                 attrs.push(attr);
-                self.bump();
+                panictry!(self.bump());
             }
             _ => break
         }
@@ -66,11 +66,11 @@ impl<'a> ParserAttr for Parser<'a> {
         let (span, value, mut style) = match self.token {
             token::Pound => {
                 let lo = self.span.lo;
-                self.bump();
+                panictry!(self.bump());
                 if permit_inner { self.expected_tokens.push(TokenType::Token(token::Not)); }
                 let style = if self.token == token::Not {
-                    self.bump();
+                    panictry!(self.bump());
                     if !permit_inner {
                         let span = self.span;
                         self.span_err(span,
@@ -84,21 +84,21 @@ impl<'a> ParserAttr for Parser<'a> {
                     ast::AttrOuter
                 };
-                self.expect(&token::OpenDelim(token::Bracket));
+                panictry!(self.expect(&token::OpenDelim(token::Bracket)));
                 let meta_item = self.parse_meta_item();
                 let hi = self.span.hi;
-                self.expect(&token::CloseDelim(token::Bracket));
+                panictry!(self.expect(&token::CloseDelim(token::Bracket)));
                 (mk_sp(lo, hi), meta_item, style)
             }
             _ => {
                 let token_str = self.this_token_to_string();
-                self.fatal(&format!("expected `#`, found `{}`", token_str));
+                panic!(self.fatal(&format!("expected `#`, found `{}`", token_str)));
             }
         };
         if permit_inner && self.token == token::Semi {
-            self.bump();
+            panictry!(self.bump());
             self.span_warn(span, "this inner attribute syntax is deprecated. \
                            The new syntax is `#![foo]`, with a bang and no semicolon");
             style = ast::AttrInner;
@@ -142,7 +142,7 @@ impl<'a> ParserAttr for Parser<'a> {
                                lo, hi);
             if attr.node.style == ast::AttrInner {
                 attrs.push(attr);
-                self.bump();
+                panictry!(self.bump());
             } else {
                 break;
             }
@@ -166,19 +166,19 @@ impl<'a> ParserAttr for Parser<'a> {
         match nt_meta {
             Some(meta) => {
-                self.bump();
+                panictry!(self.bump());
                 return meta;
             }
             None => {}
         }
         let lo = self.span.lo;
-        let ident = self.parse_ident();
+        let ident = panictry!(self.parse_ident());
         let name = self.id_to_interned_str(ident);
         match self.token {
             token::Eq => {
-                self.bump();
-                let lit = self.parse_lit();
+                panictry!(self.bump());
+                let lit = panictry!(self.parse_lit());
                 // FIXME #623 Non-string meta items are not serialized correctly;
                 // just forbid them for now
                 match lit.node {
@@ -206,10 +206,10 @@ impl<'a> ParserAttr for Parser<'a> {
     /// matches meta_seq = ( COMMASEP(meta_item) )
     fn parse_meta_seq(&mut self) -> Vec<P<ast::MetaItem>> {
-        self.parse_seq(&token::OpenDelim(token::Paren),
+        panictry!(self.parse_seq(&token::OpenDelim(token::Paren),
                        &token::CloseDelim(token::Paren),
                        seq_sep_trailing_allowed(token::Comma),
-                       |p| p.parse_meta_item()).node
+                       |p| Ok(p.parse_meta_item()))).node
     }
     fn parse_optional_meta(&mut self) -> Vec<P<ast::MetaItem>> {

View file

@@ -116,7 +116,7 @@ impl<'a> Reader for TtReader<'a> {
         r
     }
     fn fatal(&self, m: &str) -> ! {
-        self.sp_diag.span_fatal(self.cur_span, m);
+        panic!(self.sp_diag.span_fatal(self.cur_span, m));
     }
     fn err(&self, m: &str) {
         self.sp_diag.span_err(self.cur_span, m);
@@ -181,7 +181,7 @@ impl<'a> StringReader<'a> {
     /// Report a fatal lexical error with a given span.
     pub fn fatal_span(&self, sp: Span, m: &str) -> ! {
-        self.span_diagnostic.span_fatal(sp, m)
+        panic!(self.span_diagnostic.span_fatal(sp, m))
     }
     /// Report a lexical error with a given span.

View file

@@ -12,11 +12,12 @@
 use ast;
 use codemap::{Span, CodeMap, FileMap};
-use diagnostic::{SpanHandler, mk_span_handler, default_handler, Auto};
+use diagnostic::{SpanHandler, mk_span_handler, default_handler, Auto, FatalError};
 use parse::attr::ParserAttr;
 use parse::parser::Parser;
 use ptr::P;
 use std::cell::{Cell, RefCell};
 use std::fs::File;
 use std::io::Read;
@@ -27,6 +28,8 @@ use std::path::{Path, PathBuf};
 use std::rc::Rc;
 use std::str;
+pub type PResult<T> = Result<T, FatalError>;
 #[macro_use]
 pub mod parser;
@@ -88,7 +91,7 @@ pub fn parse_crate_from_file(
     cfg: ast::CrateConfig,
     sess: &ParseSess
 ) -> ast::Crate {
-    new_parser_from_file(sess, cfg, input).parse_crate_mod()
+    panictry!(new_parser_from_file(sess, cfg, input).parse_crate_mod())
     // why is there no p.abort_if_errors here?
 }
@@ -109,7 +112,7 @@ pub fn parse_crate_from_source_str(name: String,
                                        cfg,
                                        name,
                                        source);
-    maybe_aborted(p.parse_crate_mod(),p)
+    maybe_aborted(panictry!(p.parse_crate_mod()),p)
 }
 pub fn parse_crate_attrs_from_source_str(name: String,
@@ -182,7 +185,7 @@ pub fn parse_tts_from_source_str(name: String,
     );
     p.quote_depth += 1;
     // right now this is re-creating the token trees from ... token trees.
-    maybe_aborted(p.parse_all_token_trees(),p)
+    maybe_aborted(panictry!(p.parse_all_token_trees()),p)
 }
 // Note: keep in sync with `with_hygiene::new_parser_from_source_str`
@@ -245,7 +248,7 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
                        -> Rc<FileMap> {
     let err = |msg: &str| {
         match spanopt {
-            Some(sp) => sess.span_diagnostic.span_fatal(sp, msg),
+            Some(sp) => panic!(sess.span_diagnostic.span_fatal(sp, msg)),
             None => sess.span_diagnostic.handler().fatal(msg),
         }
     };
@@ -286,7 +289,7 @@ pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>)
     let cfg = Vec::new();
     let srdr = lexer::StringReader::new(&sess.span_diagnostic, filemap);
     let mut p1 = Parser::new(sess, cfg, box srdr);
-    p1.parse_all_token_trees()
+    panictry!(p1.parse_all_token_trees())
 }
 /// Given tts and cfg, produce a parser
@@ -295,7 +298,7 @@ pub fn tts_to_parser<'a>(sess: &'a ParseSess,
                          cfg: ast::CrateConfig) -> Parser<'a> {
     let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, None, tts);
     let mut p = Parser::new(sess, cfg, box trdr);
-    p.check_unknown_macro_variable();
+    panictry!(p.check_unknown_macro_variable());
     p
 }
@@ -325,7 +328,7 @@ pub mod with_hygiene {
         );
         p.quote_depth += 1;
         // right now this is re-creating the token trees from ... token trees.
-        maybe_aborted(p.parse_all_token_trees(),p)
+        maybe_aborted(panictry!(p.parse_all_token_trees()),p)
     }
     // Note: keep this in sync with `super::new_parser_from_source_str` until
@@ -358,7 +361,7 @@ pub mod with_hygiene {
         let cfg = Vec::new();
         let srdr = make_reader(&sess.span_diagnostic, filemap);
         let mut p1 = Parser::new(sess, cfg, box srdr);
-        p1.parse_all_token_trees()
+        panictry!(p1.parse_all_token_trees())
     }
 }
@@ -964,7 +967,7 @@ mod test {
     #[test] fn parse_ident_pat () {
         let sess = new_parse_sess();
         let mut parser = string_to_parser(&sess, "b".to_string());
-        assert!(parser.parse_pat()
+        assert!(panictry!(parser.parse_pat_nopanic())
                 == P(ast::Pat{
                     id: ast::DUMMY_NODE_ID,
                     node: ast::PatIdent(ast::BindByValue(ast::MutImmutable),

View file

@@ -100,7 +100,7 @@ impl<'a> ParserObsoleteMethods for parser::Parser<'a> {
     fn eat_obsolete_ident(&mut self, ident: &str) -> bool {
         if self.is_obsolete_ident(ident) {
-            self.bump();
+            panictry!(self.bump());
             true
         } else {
             false

File diff suppressed because it is too large
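(The suppressed diff is the parser itself; per the commit message, the functions in parser.rs now return PResult<>. Below is a hedged sketch of the shape those entry points take, inferred from the call sites in the other hunks — simplified stand-in types, not lines copied from the actual diff:)

#[derive(Copy, Clone, Debug)]
#[must_use]
struct FatalError;
type PResult<T> = Result<T, FatalError>;

struct Token;
struct Parser { token: Token }

impl Parser {
    // Primitives such as bump/eat/expect surface failure as PResult,
    // matching the panictry!(p.bump()) call sites in the hunks above.
    fn bump(&mut self) -> PResult<()> { Ok(()) }
    fn eat(&mut self, _tok: &Token) -> PResult<bool> { Ok(true) }
    fn expect(&mut self, _tok: &Token) -> PResult<()> { Ok(()) }

    // A non-panicking variant paired with a panicking wrapper, as with
    // parse_pat_nopanic()/parse_pat() in the test hunk above.
    fn parse_pat_nopanic(&mut self) -> PResult<String> {
        Ok(String::from("pat"))
    }
    fn parse_pat(&mut self) -> String {
        match self.parse_pat_nopanic() {
            Ok(pat) => pat,
            Err(e) => panic!("{:?}", e), // i.e. panictry!
        }
    }
}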

View file

@@ -125,7 +125,7 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> {
         match i.node {
             ast::ItemFn(_, ast::Unsafety::Unsafe, _, _, _) => {
                 let diag = self.cx.span_diagnostic;
-                diag.span_fatal(i.span, "unsafe functions cannot be used for tests");
+                panic!(diag.span_fatal(i.span, "unsafe functions cannot be used for tests"));
             }
             _ => {
                 debug!("this is a test function");

View file

@@ -44,7 +44,7 @@ fn with_error_checking_parse<T, F>(s: String, f: F) -> T where
 /// Parse a string, return a crate.
 pub fn string_to_crate (source_str : String) -> ast::Crate {
     with_error_checking_parse(source_str, |p| {
-        p.parse_crate_mod()
+        panictry!(p.parse_crate_mod())
     })
 }

View file

@@ -17,6 +17,7 @@ extern crate syntax;
 use syntax::ext::base::ExtCtxt;
 use syntax::ptr::P;
+use syntax::parse::PResult;
 fn syntax_extension(cx: &ExtCtxt) {
     let e_toks : Vec<syntax::ast::TokenTree> = quote_tokens!(cx, 1 + 2);