
Auto merge of #29285 - eefriedman:libsyntax-panic, r=nrc

A set of commits that pushes some panics out of core parser methods and into the users of those parser methods.
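For readers unfamiliar with the pattern this PR moves libsyntax towards, here is a minimal, self-contained sketch. The names PResult, FatalError, panictry!, parse_expr_nopanic and parse_expr_panic mirror the identifiers in the diffs below, but the parser itself is a simplified stand-in for illustration, not the real libsyntax code.

// Sketch only: core methods report errors through the return type; callers
// that still want the old diverging behaviour wrap the call in panictry!.

#[derive(Debug)]
pub struct FatalError;

pub type PResult<T> = Result<T, FatalError>;

// Unwraps Ok and panics on Err (libsyntax panics with the FatalError value
// itself; here we panic with its Debug form to keep the sketch simple).
macro_rules! panictry {
    ($e:expr) => {
        match $e {
            Ok(v) => v,
            Err(e) => panic!("{:?}", e),
        }
    };
}

struct Parser {
    tokens: Vec<String>,
    pos: usize,
}

impl Parser {
    // "nopanic" flavour: the caller decides how to handle the failure.
    fn parse_expr_nopanic(&mut self) -> PResult<String> {
        match self.tokens.get(self.pos) {
            Some(tok) => {
                self.pos += 1;
                Ok(tok.clone())
            }
            None => Err(FatalError),
        }
    }

    // Thin panicking wrapper kept for callers (e.g. the quote_*! expansions)
    // that still expect the old behaviour.
    fn parse_expr_panic(&mut self) -> String {
        panictry!(self.parse_expr_nopanic())
    }
}

fn main() {
    let mut p = Parser { tokens: vec!["1 + 2".to_string()], pos: 0 };
    let expr = panictry!(p.parse_expr_nopanic());
    println!("parsed: {}", expr);
    let _same = Parser { tokens: vec!["x".to_string()], pos: 0 }.parse_expr_panic();
}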
This commit is contained in:
bors 2015-11-03 03:06:03 +00:00
commit b7fbfb658e
21 changed files with 174 additions and 190 deletions

View file

@@ -657,15 +657,15 @@ pub fn build_target_config(opts: &Options, sp: &SpanHandler) -> Config {
  let target = match Target::search(&opts.target_triple) {
  Ok(t) => t,
  Err(e) => {
- sp.handler().fatal(&format!("Error loading target specification: {}", e));
+ panic!(sp.handler().fatal(&format!("Error loading target specification: {}", e)));
  }
  };
  let (int_type, uint_type) = match &target.target_pointer_width[..] {
  "32" => (ast::TyI32, ast::TyU32),
  "64" => (ast::TyI64, ast::TyU64),
- w => sp.handler().fatal(&format!("target specification was invalid: unrecognized \
- target-pointer-width {}", w))
+ w => panic!(sp.handler().fatal(&format!("target specification was invalid: \
+ unrecognized target-pointer-width {}", w))),
  };
  Config {

View file

@@ -94,7 +94,7 @@ impl Session {
  if self.opts.treat_err_as_bug {
  self.bug(msg);
  }
- self.diagnostic().handler().fatal(msg)
+ panic!(self.diagnostic().handler().fatal(msg))
  }
  pub fn span_err_or_warn(&self, is_warning: bool, sp: Span, msg: &str) {
  if is_warning {
@@ -415,8 +415,8 @@ pub fn build_session_(sopts: config::Options,
  let host = match Target::search(config::host_triple()) {
  Ok(t) => t,
  Err(e) => {
- span_diagnostic.handler()
- .fatal(&format!("Error loading host specification: {}", e));
+ panic!(span_diagnostic.handler()
+ .fatal(&format!("Error loading host specification: {}", e)));
  }
  };
  let target_cfg = config::build_target_config(&sopts, &span_diagnostic);

View file

@@ -268,8 +268,10 @@ impl Target {
  .map(|s| s.as_string())
  .and_then(|os| os.map(|s| s.to_string())) {
  Some(val) => val,
- None =>
- handler.fatal(&format!("Field {} in target specification is required", name))
+ None => {
+ panic!(handler.fatal(&format!("Field {} in target specification is required",
+ name)))
+ }
  }
  };

View file

@@ -38,14 +38,12 @@ pub fn llvm_err(handler: &diagnostic::Handler, msg: String) -> ! {
  unsafe {
  let cstr = llvm::LLVMRustGetLastError();
  if cstr == ptr::null() {
- handler.fatal(&msg[..]);
+ panic!(handler.fatal(&msg[..]));
  } else {
  let err = CStr::from_ptr(cstr).to_bytes();
  let err = String::from_utf8_lossy(err).to_string();
  libc::free(cstr as *mut _);
- handler.fatal(&format!("{}: {}",
- &msg[..],
- &err[..]));
+ panic!(handler.fatal(&format!("{}: {}", &msg[..], &err[..])));
  }
  }
  }

View file

@@ -206,13 +206,9 @@ impl Handler {
  can_emit_warnings: can_emit_warnings
  }
  }
- pub fn fatal(&self, msg: &str) -> ! {
+ pub fn fatal(&self, msg: &str) -> FatalError {
  self.emit.borrow_mut().emit(None, msg, None, Fatal);
- // Suppress the fatal error message from the panic below as we've
- // already terminated in our own "legitimate" fashion.
- io::set_panic(Box::new(io::sink()));
- panic!(FatalError);
+ FatalError
  }
  pub fn err(&self, msg: &str) {
  self.emit.borrow_mut().emit(None, msg, None, Error);
@@ -237,7 +233,8 @@ impl Handler {
  self.err_count.get());
  }
  }
- self.fatal(&s[..]);
+ panic!(self.fatal(&s[..]));
  }
  pub fn warn(&self, msg: &str) {
  self.emit.borrow_mut().emit(None, msg, None, Warning);
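For context on the hunk above, a minimal sketch of the new Handler::fatal contract (a toy Handler with a println-based emitter, not the real libsyntax::diagnostic machinery): fatal now emits the message and returns a FatalError value instead of diverging, and the call site decides whether to panic with it.

// Sketch only; the real Handler, emitter and FatalError live in libsyntax.

#[derive(Debug)]
struct FatalError;

struct Handler;

impl Handler {
    // Emits the message and hands back a value; it no longer panics itself.
    fn fatal(&self, msg: &str) -> FatalError {
        eprintln!("error: {}", msg); // stand-in for the real emitter
        FatalError
    }
}

fn main() {
    let handler = Handler;
    let bad_input = true; // hypothetical condition for the example
    if bad_input {
        // Call sites that previously relied on `fatal` diverging now wrap it,
        // as the diff above does with `panic!(self.fatal(..))`.
        panic!("{:?}", handler.fatal("something went irrecoverably wrong"));
    }
}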

View file

@@ -79,7 +79,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
  cx.span_err(sp, "malformed inline assembly");
  return DummyResult::expr(sp);
  }
- let (s, style) = match expr_to_string(cx, p.parse_expr(),
+ let (s, style) = match expr_to_string(cx, panictry!(p.parse_expr_nopanic()),
  "inline assembly must be a string literal") {
  Some((s, st)) => (s, st),
  // let compilation continue
@@ -102,7 +102,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
  let span = p.last_span;
  panictry!(p.expect(&token::OpenDelim(token::Paren)));
- let out = p.parse_expr();
+ let out = panictry!(p.parse_expr_nopanic());
  panictry!(p.expect(&token::CloseDelim(token::Paren)));
  // Expands a read+write operand into two operands.
@@ -146,7 +146,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
  }
  panictry!(p.expect(&token::OpenDelim(token::Paren)));
- let input = p.parse_expr();
+ let input = panictry!(p.parse_expr_nopanic());
  panictry!(p.expect(&token::CloseDelim(token::Paren)));
  inputs.push((constraint, input));

View file

@@ -809,7 +809,7 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt,
  cx.span_err(sp, &format!("{} takes 1 argument", name));
  return None
  }
- let ret = cx.expander().fold_expr(p.parse_expr());
+ let ret = cx.expander().fold_expr(panictry!(p.parse_expr_nopanic()));
  if p.token != token::Eof {
  cx.span_err(sp, &format!("{} takes 1 argument", name));
  }
@@ -826,7 +826,7 @@ pub fn get_exprs_from_tts(cx: &mut ExtCtxt,
  let mut p = cx.new_parser_from_tts(tts);
  let mut es = Vec::new();
  while p.token != token::Eof {
- es.push(cx.expander().fold_expr(p.parse_expr()));
+ es.push(cx.expander().fold_expr(panictry!(p.parse_expr_nopanic())));
  if panictry!(p.eat(&token::Comma)){
  continue;
  }

View file

@@ -19,7 +19,6 @@ use ext::base;
  use ext::build::AstBuilder;
  use attr;
  use attr::*;
- use parse::attr::ParserAttr;
  use parse::token;
  pub fn expand_cfg<'cx>(cx: &mut ExtCtxt,
@@ -27,7 +26,7 @@ pub fn expand_cfg<'cx>(cx: &mut ExtCtxt,
  tts: &[ast::TokenTree])
  -> Box<base::MacResult+'static> {
  let mut p = cx.new_parser_from_tts(tts);
- let cfg = p.parse_meta_item();
+ let cfg = panictry!(p.parse_meta_item());
  if !panictry!(p.eat(&token::Eof)){
  cx.span_err(sp, "expected 1 cfg-pattern");

View file

@@ -93,7 +93,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
  ecx.span_err(sp, "requires at least a format string argument");
  return None;
  }
- let fmtstr = p.parse_expr();
+ let fmtstr = panictry!(p.parse_expr_nopanic());
  let mut named = false;
  while p.token != token::Eof {
  if !panictry!(p.eat(&token::Comma)) {
@@ -124,7 +124,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
  let name: &str = &ident.name.as_str();
  panictry!(p.expect(&token::Eq));
- let e = p.parse_expr();
+ let e = panictry!(p.parse_expr_nopanic());
  match names.get(name) {
  None => {}
  Some(prev) => {
@@ -138,7 +138,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
  order.push(name.to_string());
  names.insert(name.to_string(), e);
  } else {
- args.push(p.parse_expr());
+ args.push(panictry!(p.parse_expr_nopanic()));
  }
  }
  Some((fmtstr, args, order, names))

View file

@@ -327,7 +327,7 @@ pub fn expand_quote_expr<'cx>(cx: &'cx mut ExtCtxt,
  sp: Span,
  tts: &[ast::TokenTree])
  -> Box<base::MacResult+'cx> {
- let expanded = expand_parse_call(cx, sp, "parse_expr", vec!(), tts);
+ let expanded = expand_parse_call(cx, sp, "parse_expr_panic", vec!(), tts);
  base::MacEager::expr(expanded)
  }
@@ -335,7 +335,7 @@ pub fn expand_quote_item<'cx>(cx: &mut ExtCtxt,
  sp: Span,
  tts: &[ast::TokenTree])
  -> Box<base::MacResult+'cx> {
- let expanded = expand_parse_call(cx, sp, "parse_item", vec!(), tts);
+ let expanded = expand_parse_call(cx, sp, "parse_item_panic", vec!(), tts);
  base::MacEager::expr(expanded)
  }
@@ -343,7 +343,7 @@ pub fn expand_quote_pat<'cx>(cx: &'cx mut ExtCtxt,
  sp: Span,
  tts: &[ast::TokenTree])
  -> Box<base::MacResult+'cx> {
- let expanded = expand_parse_call(cx, sp, "parse_pat", vec!(), tts);
+ let expanded = expand_parse_call(cx, sp, "parse_pat_panic", vec!(), tts);
  base::MacEager::expr(expanded)
  }
@@ -351,7 +351,7 @@ pub fn expand_quote_arm(cx: &mut ExtCtxt,
  sp: Span,
  tts: &[ast::TokenTree])
  -> Box<base::MacResult+'static> {
- let expanded = expand_parse_call(cx, sp, "parse_arm", vec!(), tts);
+ let expanded = expand_parse_call(cx, sp, "parse_arm_panic", vec!(), tts);
  base::MacEager::expr(expanded)
  }
@@ -359,7 +359,7 @@ pub fn expand_quote_ty(cx: &mut ExtCtxt,
  sp: Span,
  tts: &[ast::TokenTree])
  -> Box<base::MacResult+'static> {
- let expanded = expand_parse_call(cx, sp, "parse_ty", vec!(), tts);
+ let expanded = expand_parse_call(cx, sp, "parse_ty_panic", vec!(), tts);
  base::MacEager::expr(expanded)
  }
@@ -367,7 +367,7 @@ pub fn expand_quote_stmt(cx: &mut ExtCtxt,
  sp: Span,
  tts: &[ast::TokenTree])
  -> Box<base::MacResult+'static> {
- let expanded = expand_parse_call(cx, sp, "parse_stmt", vec!(), tts);
+ let expanded = expand_parse_call(cx, sp, "parse_stmt_panic", vec!(), tts);
  base::MacEager::expr(expanded)
  }
@@ -375,7 +375,7 @@ pub fn expand_quote_attr(cx: &mut ExtCtxt,
  sp: Span,
  tts: &[ast::TokenTree])
  -> Box<base::MacResult+'static> {
- let expanded = expand_parse_call(cx, sp, "parse_attribute",
+ let expanded = expand_parse_call(cx, sp, "parse_attribute_panic",
  vec!(cx.expr_bool(sp, true)), tts);
  base::MacEager::expr(expanded)
@@ -694,7 +694,7 @@ fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[ast::TokenTree])
  let mut p = cx.new_parser_from_tts(tts);
  p.quote_depth += 1;
- let cx_expr = p.parse_expr();
+ let cx_expr = panictry!(p.parse_expr_nopanic());
  if !panictry!(p.eat(&token::Comma)) {
  panic!(p.fatal("expected token `,`"));
  }

View file

@@ -109,13 +109,13 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree
  }
  impl<'a> base::MacResult for ExpandResult<'a> {
  fn make_expr(mut self: Box<ExpandResult<'a>>) -> Option<P<ast::Expr>> {
- Some(self.p.parse_expr())
+ Some(panictry!(self.p.parse_expr_nopanic()))
  }
  fn make_items(mut self: Box<ExpandResult<'a>>)
  -> Option<SmallVector<P<ast::Item>>> {
  let mut ret = SmallVector::zero();
  while self.p.token != token::Eof {
- match self.p.parse_item() {
+ match panictry!(self.p.parse_item_nopanic()) {
  Some(item) => ret.push(item),
  None => panic!(self.p.span_fatal(
  self.p.span,

View file

@@ -85,7 +85,6 @@ use codemap::{BytePos, mk_sp, Span};
  use codemap;
  use parse::lexer::*; //resolve bug?
  use parse::ParseSess;
- use parse::attr::ParserAttr;
  use parse::parser::{LifetimeAndTypesWithoutColons, Parser};
  use parse::token::{Eof, DocComment, MatchNt, SubstNt};
  use parse::token::{Token, Nonterminal};
@@ -503,18 +502,18 @@ pub fn parse_nt(p: &mut Parser, sp: Span, name: &str) -> Nonterminal {
  // check at the beginning and the parser checks after each bump
  panictry!(p.check_unknown_macro_variable());
  match name {
- "item" => match p.parse_item() {
+ "item" => match panictry!(p.parse_item_nopanic()) {
  Some(i) => token::NtItem(i),
  None => panic!(p.fatal("expected an item keyword"))
  },
  "block" => token::NtBlock(panictry!(p.parse_block())),
- "stmt" => match p.parse_stmt() {
+ "stmt" => match panictry!(p.parse_stmt_nopanic()) {
  Some(s) => token::NtStmt(s),
  None => panic!(p.fatal("expected a statement"))
  },
- "pat" => token::NtPat(p.parse_pat()),
- "expr" => token::NtExpr(p.parse_expr()),
- "ty" => token::NtTy(p.parse_ty()),
+ "pat" => token::NtPat(panictry!(p.parse_pat_nopanic())),
+ "expr" => token::NtExpr(panictry!(p.parse_expr_nopanic())),
+ "ty" => token::NtTy(panictry!(p.parse_ty_nopanic())),
  // this could be handled like a token, since it is one
  "ident" => match p.token {
  token::Ident(sn,b) => { panictry!(p.bump()); token::NtIdent(Box::new(sn),b) }
@@ -527,7 +526,7 @@ pub fn parse_nt(p: &mut Parser, sp: Span, name: &str) -> Nonterminal {
  "path" => {
  token::NtPath(Box::new(panictry!(p.parse_path(LifetimeAndTypesWithoutColons))))
  },
- "meta" => token::NtMeta(p.parse_meta_item()),
+ "meta" => token::NtMeta(panictry!(p.parse_meta_item())),
  _ => {
  panic!(p.span_fatal_help(sp,
  &format!("invalid fragment specifier `{}`", name),

View file

@@ -66,18 +66,18 @@ impl<'a> ParserAnyMacro<'a> {
  impl<'a> MacResult for ParserAnyMacro<'a> {
  fn make_expr(self: Box<ParserAnyMacro<'a>>) -> Option<P<ast::Expr>> {
- let ret = self.parser.borrow_mut().parse_expr();
+ let ret = panictry!(self.parser.borrow_mut().parse_expr_nopanic());
  self.ensure_complete_parse(true);
  Some(ret)
  }
  fn make_pat(self: Box<ParserAnyMacro<'a>>) -> Option<P<ast::Pat>> {
- let ret = self.parser.borrow_mut().parse_pat();
+ let ret = panictry!(self.parser.borrow_mut().parse_pat_nopanic());
  self.ensure_complete_parse(false);
  Some(ret)
  }
  fn make_items(self: Box<ParserAnyMacro<'a>>) -> Option<SmallVector<P<ast::Item>>> {
  let mut ret = SmallVector::zero();
- while let Some(item) = self.parser.borrow_mut().parse_item() {
+ while let Some(item) = panictry!(self.parser.borrow_mut().parse_item_nopanic()) {
  ret.push(item);
  }
  self.ensure_complete_parse(false);
@@ -119,7 +119,7 @@ impl<'a> MacResult for ParserAnyMacro<'a> {
  }
  fn make_ty(self: Box<ParserAnyMacro<'a>>) -> Option<P<ast::Ty>> {
- let ret = self.parser.borrow_mut().parse_ty();
+ let ret = panictry!(self.parser.borrow_mut().parse_ty_nopanic());
  self.ensure_complete_parse(true);
  Some(ret)
  }

View file

@@ -30,7 +30,6 @@
  #![feature(filling_drop)]
  #![feature(libc)]
  #![feature(rustc_private)]
- #![feature(set_stdio)]
  #![feature(staged_api)]
  #![feature(str_char)]
  #![feature(str_escape)]

View file

@@ -12,30 +12,21 @@ use attr;
  use ast;
  use codemap::{spanned, Spanned, mk_sp, Span};
  use parse::common::*; //resolve bug?
+ use parse::PResult;
  use parse::token;
  use parse::parser::{Parser, TokenType};
  use ptr::P;
- /// A parser that can parse attributes.
- pub trait ParserAttr {
- fn parse_outer_attributes(&mut self) -> Vec<ast::Attribute>;
- fn parse_inner_attributes(&mut self) -> Vec<ast::Attribute>;
- fn parse_attribute(&mut self, permit_inner: bool) -> ast::Attribute;
- fn parse_meta_item(&mut self) -> P<ast::MetaItem>;
- fn parse_meta_seq(&mut self) -> Vec<P<ast::MetaItem>>;
- fn parse_optional_meta(&mut self) -> Vec<P<ast::MetaItem>>;
- }
- impl<'a> ParserAttr for Parser<'a> {
+ impl<'a> Parser<'a> {
  /// Parse attributes that appear before an item
- fn parse_outer_attributes(&mut self) -> Vec<ast::Attribute> {
+ pub fn parse_outer_attributes(&mut self) -> PResult<Vec<ast::Attribute>> {
  let mut attrs: Vec<ast::Attribute> = Vec::new();
  loop {
  debug!("parse_outer_attributes: self.token={:?}",
  self.token);
  match self.token {
  token::Pound => {
- attrs.push(self.parse_attribute(false));
+ attrs.push(try!(self.parse_attribute(false)));
  }
  token::DocComment(s) => {
  let attr = ::attr::mk_sugared_doc_attr(
@@ -45,32 +36,32 @@ impl<'a> ParserAttr for Parser<'a> {
  self.span.hi
  );
  if attr.node.style != ast::AttrStyle::Outer {
- panic!(self.fatal("expected outer comment"));
+ return Err(self.fatal("expected outer comment"));
  }
  attrs.push(attr);
- panictry!(self.bump());
+ try!(self.bump());
  }
  _ => break
  }
  }
- return attrs;
+ return Ok(attrs);
  }
  /// Matches `attribute = # ! [ meta_item ]`
  ///
  /// If permit_inner is true, then a leading `!` indicates an inner
  /// attribute
- fn parse_attribute(&mut self, permit_inner: bool) -> ast::Attribute {
+ pub fn parse_attribute(&mut self, permit_inner: bool) -> PResult<ast::Attribute> {
  debug!("parse_attributes: permit_inner={:?} self.token={:?}",
  permit_inner, self.token);
  let (span, value, mut style) = match self.token {
  token::Pound => {
  let lo = self.span.lo;
- panictry!(self.bump());
+ try!(self.bump());
  if permit_inner { self.expected_tokens.push(TokenType::Token(token::Not)); }
  let style = if self.token == token::Not {
- panictry!(self.bump());
+ try!(self.bump());
  if !permit_inner {
  let span = self.span;
  self.span_err(span,
@@ -84,27 +75,27 @@ impl<'a> ParserAttr for Parser<'a> {
  ast::AttrStyle::Outer
  };
- panictry!(self.expect(&token::OpenDelim(token::Bracket)));
- let meta_item = self.parse_meta_item();
+ try!(self.expect(&token::OpenDelim(token::Bracket)));
+ let meta_item = try!(self.parse_meta_item());
  let hi = self.span.hi;
- panictry!(self.expect(&token::CloseDelim(token::Bracket)));
+ try!(self.expect(&token::CloseDelim(token::Bracket)));
  (mk_sp(lo, hi), meta_item, style)
  }
  _ => {
  let token_str = self.this_token_to_string();
- panic!(self.fatal(&format!("expected `#`, found `{}`", token_str)));
+ return Err(self.fatal(&format!("expected `#`, found `{}`", token_str)));
  }
  };
  if permit_inner && self.token == token::Semi {
- panictry!(self.bump());
+ try!(self.bump());
  self.span_warn(span, "this inner attribute syntax is deprecated. \
  The new syntax is `#![foo]`, with a bang and no semicolon");
  style = ast::AttrStyle::Inner;
  }
- return Spanned {
+ Ok(Spanned {
  span: span,
  node: ast::Attribute_ {
  id: attr::mk_attr_id(),
@@ -112,7 +103,7 @@ impl<'a> ParserAttr for Parser<'a> {
  value: value,
  is_sugared_doc: false
  }
- };
+ })
  }
  /// Parse attributes that appear after the opening of an item. These should
@@ -120,7 +111,7 @@ impl<'a> ParserAttr for Parser<'a> {
  /// terminated by a semicolon.
  /// matches inner_attrs*
- fn parse_inner_attributes(&mut self) -> Vec<ast::Attribute> {
+ pub fn parse_inner_attributes(&mut self) -> PResult<Vec<ast::Attribute>> {
  let mut attrs: Vec<ast::Attribute> = vec![];
  loop {
  match self.token {
@@ -130,7 +121,7 @@ impl<'a> ParserAttr for Parser<'a> {
  break;
  }
- let attr = self.parse_attribute(true);
+ let attr = try!(self.parse_attribute(true));
  assert!(attr.node.style == ast::AttrStyle::Inner);
  attrs.push(attr);
  }
@@ -141,7 +132,7 @@ impl<'a> ParserAttr for Parser<'a> {
  let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), str, lo, hi);
  if attr.node.style == ast::AttrStyle::Inner {
  attrs.push(attr);
- panictry!(self.bump());
+ try!(self.bump());
  } else {
  break;
  }
@@ -149,13 +140,13 @@ impl<'a> ParserAttr for Parser<'a> {
  _ => break
  }
  }
- attrs
+ Ok(attrs)
  }
  /// matches meta_item = IDENT
  /// | IDENT = lit
  /// | IDENT meta_seq
- fn parse_meta_item(&mut self) -> P<ast::MetaItem> {
+ pub fn parse_meta_item(&mut self) -> PResult<P<ast::MetaItem>> {
  let nt_meta = match self.token {
  token::Interpolated(token::NtMeta(ref e)) => {
  Some(e.clone())
@@ -165,19 +156,19 @@ impl<'a> ParserAttr for Parser<'a> {
  match nt_meta {
  Some(meta) => {
- panictry!(self.bump());
- return meta;
+ try!(self.bump());
+ return Ok(meta);
  }
  None => {}
  }
  let lo = self.span.lo;
- let ident = panictry!(self.parse_ident());
+ let ident = try!(self.parse_ident());
  let name = self.id_to_interned_str(ident);
  match self.token {
  token::Eq => {
- panictry!(self.bump());
- let lit = panictry!(self.parse_lit());
+ try!(self.bump());
+ let lit = try!(self.parse_lit());
  // FIXME #623 Non-string meta items are not serialized correctly;
  // just forbid them for now
  match lit.node {
@@ -189,32 +180,25 @@ impl<'a> ParserAttr for Parser<'a> {
  }
  }
  let hi = self.span.hi;
- P(spanned(lo, hi, ast::MetaNameValue(name, lit)))
+ Ok(P(spanned(lo, hi, ast::MetaNameValue(name, lit))))
  }
  token::OpenDelim(token::Paren) => {
- let inner_items = self.parse_meta_seq();
+ let inner_items = try!(self.parse_meta_seq());
  let hi = self.span.hi;
- P(spanned(lo, hi, ast::MetaList(name, inner_items)))
+ Ok(P(spanned(lo, hi, ast::MetaList(name, inner_items))))
  }
  _ => {
  let hi = self.last_span.hi;
- P(spanned(lo, hi, ast::MetaWord(name)))
+ Ok(P(spanned(lo, hi, ast::MetaWord(name))))
  }
  }
  }
  /// matches meta_seq = ( COMMASEP(meta_item) )
- fn parse_meta_seq(&mut self) -> Vec<P<ast::MetaItem>> {
- panictry!(self.parse_seq(&token::OpenDelim(token::Paren),
+ fn parse_meta_seq(&mut self) -> PResult<Vec<P<ast::MetaItem>>> {
+ self.parse_unspanned_seq(&token::OpenDelim(token::Paren),
  &token::CloseDelim(token::Paren),
  seq_sep_trailing_allowed(token::Comma),
- |p| Ok(p.parse_meta_item()))).node
- }
- fn parse_optional_meta(&mut self) -> Vec<P<ast::MetaItem>> {
- match self.token {
- token::OpenDelim(token::Paren) => self.parse_meta_seq(),
- _ => Vec::new()
- }
+ |p| p.parse_meta_item())
  }
  }

View file

@@ -270,7 +270,7 @@ fn read_block_comment(rdr: &mut StringReader,
  while level > 0 {
  debug!("=== block comment level {}", level);
  if rdr.is_eof() {
- rdr.fatal("unterminated block comment");
+ panic!(rdr.fatal("unterminated block comment"));
  }
  if rdr.curr_is('\n') {
  trim_whitespace_prefix_and_push_line(&mut lines,

View file

@@ -11,6 +11,7 @@
  use ast;
  use codemap::{BytePos, CharPos, CodeMap, Pos, Span};
  use codemap;
+ use diagnostic::FatalError;
  use diagnostic::SpanHandler;
  use ext::tt::transcribe::tt_next_token;
  use parse::token::str_to_ident;
@@ -30,7 +31,7 @@ pub trait Reader {
  fn is_eof(&self) -> bool;
  fn next_token(&mut self) -> TokenAndSpan;
  /// Report a fatal error with the current span.
- fn fatal(&self, &str) -> !;
+ fn fatal(&self, &str) -> FatalError;
  /// Report a non-fatal error with the current span.
  fn err(&self, &str);
  fn peek(&self) -> TokenAndSpan;
@@ -86,7 +87,7 @@ impl<'a> Reader for StringReader<'a> {
  self.advance_token();
  ret_val
  }
- fn fatal(&self, m: &str) -> ! {
+ fn fatal(&self, m: &str) -> FatalError {
  self.fatal_span(self.peek_span, m)
  }
  fn err(&self, m: &str) {
@@ -110,8 +111,8 @@ impl<'a> Reader for TtReader<'a> {
  debug!("TtReader: r={:?}", r);
  r
  }
- fn fatal(&self, m: &str) -> ! {
- panic!(self.sp_diag.span_fatal(self.cur_span, m));
+ fn fatal(&self, m: &str) -> FatalError {
+ self.sp_diag.span_fatal(self.cur_span, m)
  }
  fn err(&self, m: &str) {
  self.sp_diag.span_err(self.cur_span, m);
@@ -163,8 +164,8 @@ impl<'a> StringReader<'a> {
  }
  /// Report a fatal lexical error with a given span.
- pub fn fatal_span(&self, sp: Span, m: &str) -> ! {
- panic!(self.span_diagnostic.span_fatal(sp, m))
+ pub fn fatal_span(&self, sp: Span, m: &str) -> FatalError {
+ self.span_diagnostic.span_fatal(sp, m)
  }
  /// Report a lexical error with a given span.
@@ -178,7 +179,7 @@ impl<'a> StringReader<'a> {
  }
  /// Report a fatal error spanning [`from_pos`, `to_pos`).
- fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> ! {
+ fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> FatalError {
  self.fatal_span(codemap::mk_sp(from_pos, to_pos), m)
  }
@@ -194,11 +195,11 @@ impl<'a> StringReader<'a> {
  /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
  /// escaped character to the error message
- fn fatal_span_char(&self, from_pos: BytePos, to_pos: BytePos, m: &str, c: char) -> ! {
+ fn fatal_span_char(&self, from_pos: BytePos, to_pos: BytePos, m: &str, c: char) -> FatalError {
  let mut m = m.to_string();
  m.push_str(": ");
  for c in c.escape_default() { m.push(c) }
- self.fatal_span_(from_pos, to_pos, &m[..]);
+ self.fatal_span_(from_pos, to_pos, &m[..])
  }
  /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
@@ -212,12 +213,12 @@ impl<'a> StringReader<'a> {
  /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the
  /// offending string to the error message
- fn fatal_span_verbose(&self, from_pos: BytePos, to_pos: BytePos, mut m: String) -> ! {
+ fn fatal_span_verbose(&self, from_pos: BytePos, to_pos: BytePos, mut m: String) -> FatalError {
  m.push_str(": ");
  let from = self.byte_offset(from_pos).to_usize();
  let to = self.byte_offset(to_pos).to_usize();
  m.push_str(&self.source_text[from..to]);
- self.fatal_span_(from_pos, to_pos, &m[..]);
+ self.fatal_span_(from_pos, to_pos, &m[..])
  }
  /// Advance peek_tok and peek_span to refer to the next token, and
@@ -538,7 +539,7 @@ impl<'a> StringReader<'a> {
  "unterminated block comment"
  };
  let last_bpos = self.last_pos;
- self.fatal_span_(start_bpos, last_bpos, msg);
+ panic!(self.fatal_span_(start_bpos, last_bpos, msg));
  }
  let n = self.curr.unwrap();
  match n {
@@ -682,7 +683,9 @@ impl<'a> StringReader<'a> {
  for _ in 0..n_digits {
  if self.is_eof() {
  let last_bpos = self.last_pos;
- self.fatal_span_(start_bpos, last_bpos, "unterminated numeric character escape");
+ panic!(self.fatal_span_(start_bpos,
+ last_bpos,
+ "unterminated numeric character escape"));
  }
  if self.curr_is(delim) {
  let last_bpos = self.last_pos;
@@ -835,15 +838,15 @@ impl<'a> StringReader<'a> {
  let c = match self.curr {
  Some(c) => c,
  None => {
- self.fatal_span_(start_bpos, self.last_pos,
- "unterminated unicode escape (found EOF)");
+ panic!(self.fatal_span_(start_bpos, self.last_pos,
+ "unterminated unicode escape (found EOF)"));
  }
  };
  accum_int *= 16;
  accum_int += c.to_digit(16).unwrap_or_else(|| {
  if c == delim {
- self.fatal_span_(self.last_pos, self.pos,
- "unterminated unicode escape (needed a `}`)");
+ panic!(self.fatal_span_(self.last_pos, self.pos,
+ "unterminated unicode escape (needed a `}`)"));
  } else {
  self.err_span_char(self.last_pos, self.pos,
  "invalid character in unicode escape", c);
@@ -1077,12 +1080,12 @@ impl<'a> StringReader<'a> {
  let valid = self.scan_char_or_byte(start, c2, /* ascii_only = */ false, '\'');
  if !self.curr_is('\'') {
  let last_bpos = self.last_pos;
- self.fatal_span_verbose(
+ panic!(self.fatal_span_verbose(
  // Byte offsetting here is okay because the
  // character before position `start` is an
  // ascii single quote.
  start - BytePos(1), last_bpos,
- "unterminated character constant".to_string());
+ "unterminated character constant".to_string()));
  }
  let id = if valid { self.name_from(start) } else { token::intern("0") };
  self.bump(); // advance curr past token
@@ -1107,7 +1110,9 @@ impl<'a> StringReader<'a> {
  while !self.curr_is('"') {
  if self.is_eof() {
  let last_bpos = self.last_pos;
- self.fatal_span_(start_bpos, last_bpos, "unterminated double quote string");
+ panic!(self.fatal_span_(start_bpos,
+ last_bpos,
+ "unterminated double quote string"));
  }
  let ch_start = self.last_pos;
@@ -1133,14 +1138,14 @@ impl<'a> StringReader<'a> {
  if self.is_eof() {
  let last_bpos = self.last_pos;
- self.fatal_span_(start_bpos, last_bpos, "unterminated raw string");
+ panic!(self.fatal_span_(start_bpos, last_bpos, "unterminated raw string"));
  } else if !self.curr_is('"') {
  let last_bpos = self.last_pos;
  let curr_char = self.curr.unwrap();
- self.fatal_span_char(start_bpos, last_bpos,
+ panic!(self.fatal_span_char(start_bpos, last_bpos,
  "found invalid character; \
  only `#` is allowed in raw string delimitation",
- curr_char);
+ curr_char));
  }
  self.bump();
  let content_start_bpos = self.last_pos;
@@ -1149,7 +1154,7 @@ impl<'a> StringReader<'a> {
  'outer: loop {
  if self.is_eof() {
  let last_bpos = self.last_pos;
- self.fatal_span_(start_bpos, last_bpos, "unterminated raw string");
+ panic!(self.fatal_span_(start_bpos, last_bpos, "unterminated raw string"));
  }
  //if self.curr_is('"') {
  //content_end_bpos = self.last_pos;
@@ -1218,7 +1223,7 @@ impl<'a> StringReader<'a> {
  c => {
  let last_bpos = self.last_pos;
  let bpos = self.pos;
- self.fatal_span_char(last_bpos, bpos, "unknown start of token", c);
+ panic!(self.fatal_span_char(last_bpos, bpos, "unknown start of token", c));
  }
  }
  }
@@ -1271,9 +1276,9 @@ impl<'a> StringReader<'a> {
  // character before position `start` are an
  // ascii single quote and ascii 'b'.
  let last_pos = self.last_pos;
- self.fatal_span_verbose(
+ panic!(self.fatal_span_verbose(
  start - BytePos(2), last_pos,
- "unterminated byte constant".to_string());
+ "unterminated byte constant".to_string()));
  }
  let id = if valid { self.name_from(start) } else { token::intern("?") };
@@ -1293,8 +1298,7 @@ impl<'a> StringReader<'a> {
  while !self.curr_is('"') {
  if self.is_eof() {
  let last_pos = self.last_pos;
- self.fatal_span_(start, last_pos,
- "unterminated double quote byte string");
+ panic!(self.fatal_span_(start, last_pos, "unterminated double quote byte string"));
  }
  let ch_start = self.last_pos;
@@ -1318,14 +1322,14 @@ impl<'a> StringReader<'a> {
  if self.is_eof() {
  let last_pos = self.last_pos;
- self.fatal_span_(start_bpos, last_pos, "unterminated raw string");
+ panic!(self.fatal_span_(start_bpos, last_pos, "unterminated raw string"));
  } else if !self.curr_is('"') {
  let last_pos = self.last_pos;
  let ch = self.curr.unwrap();
- self.fatal_span_char(start_bpos, last_pos,
+ panic!(self.fatal_span_char(start_bpos, last_pos,
  "found invalid character; \
  only `#` is allowed in raw string delimitation",
- ch);
+ ch));
  }
  self.bump();
  let content_start_bpos = self.last_pos;
@@ -1334,7 +1338,7 @@ impl<'a> StringReader<'a> {
  match self.curr {
  None => {
  let last_pos = self.last_pos;
- self.fatal_span_(start_bpos, last_pos, "unterminated raw string")
+ panic!(self.fatal_span_(start_bpos, last_pos, "unterminated raw string"))
  },
  Some('"') => {
  content_end_bpos = self.last_pos;

View file

@@ -13,7 +13,6 @@
  use ast;
  use codemap::{self, Span, CodeMap, FileMap};
  use diagnostic::{SpanHandler, Handler, Auto, FatalError};
- use parse::attr::ParserAttr;
  use parse::parser::Parser;
  use parse::token::InternedString;
  use ptr::P;
@@ -83,7 +82,8 @@ pub fn parse_crate_attrs_from_file(
  cfg: ast::CrateConfig,
  sess: &ParseSess
  ) -> Vec<ast::Attribute> {
- new_parser_from_file(sess, cfg, input).parse_inner_attributes()
+ // FIXME: maybe_aborted?
+ panictry!(new_parser_from_file(sess, cfg, input).parse_inner_attributes())
  }
  pub fn parse_crate_from_source_str(name: String,
@@ -107,7 +107,7 @@ pub fn parse_crate_attrs_from_source_str(name: String,
  cfg,
  name,
  source);
- maybe_aborted(p.parse_inner_attributes(), p)
+ maybe_aborted(panictry!(p.parse_inner_attributes()), p)
  }
  pub fn parse_expr_from_source_str(name: String,
@@ -116,7 +116,7 @@ pub fn parse_expr_from_source_str(name: String,
  sess: &ParseSess)
  -> P<ast::Expr> {
  let mut p = new_parser_from_source_str(sess, cfg, name, source);
- maybe_aborted(p.parse_expr(), p)
+ maybe_aborted(panictry!(p.parse_expr_nopanic()), p)
  }
  pub fn parse_item_from_source_str(name: String,
@@ -125,7 +125,7 @@ pub fn parse_item_from_source_str(name: String,
  sess: &ParseSess)
  -> Option<P<ast::Item>> {
  let mut p = new_parser_from_source_str(sess, cfg, name, source);
- maybe_aborted(p.parse_item(),p)
+ maybe_aborted(panictry!(p.parse_item_nopanic()), p)
  }
  pub fn parse_meta_from_source_str(name: String,
@@ -134,7 +134,7 @@ pub fn parse_meta_from_source_str(name: String,
  sess: &ParseSess)
  -> P<ast::MetaItem> {
  let mut p = new_parser_from_source_str(sess, cfg, name, source);
- maybe_aborted(p.parse_meta_item(),p)
+ maybe_aborted(panictry!(p.parse_meta_item()), p)
  }
  pub fn parse_stmt_from_source_str(name: String,
@@ -148,7 +148,7 @@ pub fn parse_stmt_from_source_str(name: String,
  name,
  source
  );
- maybe_aborted(p.parse_stmt(), p)
+ maybe_aborted(panictry!(p.parse_stmt_nopanic()), p)
  }
  // Warning: This parses with quote_depth > 0, which is not the default.
@@ -235,7 +235,7 @@ fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
  let msg = format!("couldn't read {:?}: {}", path.display(), e);
  match spanopt {
  Some(sp) => panic!(sess.span_diagnostic.span_fatal(sp, &msg)),
- None => sess.span_diagnostic.handler().fatal(&msg)
+ None => panic!(sess.span_diagnostic.handler().fatal(&msg))
  }
  }
  }
@@ -856,7 +856,7 @@ mod tests {
  #[test] fn parse_stmt_1 () {
  assert!(string_to_stmt("b;".to_string()) ==
- P(Spanned{
+ Some(P(Spanned{
  node: ast::StmtExpr(P(ast::Expr {
  id: ast::DUMMY_NODE_ID,
  node: ast::ExprPath(None, ast::Path {
@@ -871,7 +871,7 @@ mod tests {
  }),
  span: sp(0,1)}),
  ast::DUMMY_NODE_ID),
- span: sp(0,1)}))
+ span: sp(0,1)})))
  }
} }

View file

@@ -63,7 +63,6 @@ use codemap::{self, Span, BytePos, Spanned, spanned, mk_sp, CodeMap};
  use diagnostic;
  use ext::tt::macro_parser;
  use parse;
- use parse::attr::ParserAttr;
  use parse::classify;
  use parse::common::{SeqSep, seq_sep_none, seq_sep_trailing_allowed};
  use parse::lexer::{Reader, TokenAndSpan};
@@ -358,31 +357,36 @@ impl<'a> Parser<'a> {
  }
  // Panicing fns (for now!)
- // This is so that the quote_*!() syntax extensions
- pub fn parse_expr(&mut self) -> P<Expr> {
+ // These functions are used by the quote_*!() syntax extensions, but shouldn't
+ // be used otherwise.
+ pub fn parse_expr_panic(&mut self) -> P<Expr> {
  panictry!(self.parse_expr_nopanic())
  }
- pub fn parse_item(&mut self) -> Option<P<Item>> {
+ pub fn parse_item_panic(&mut self) -> Option<P<Item>> {
  panictry!(self.parse_item_nopanic())
  }
- pub fn parse_pat(&mut self) -> P<Pat> {
+ pub fn parse_pat_panic(&mut self) -> P<Pat> {
  panictry!(self.parse_pat_nopanic())
  }
- pub fn parse_arm(&mut self) -> Arm {
+ pub fn parse_arm_panic(&mut self) -> Arm {
  panictry!(self.parse_arm_nopanic())
  }
- pub fn parse_ty(&mut self) -> P<Ty> {
+ pub fn parse_ty_panic(&mut self) -> P<Ty> {
  panictry!(self.parse_ty_nopanic())
  }
- pub fn parse_stmt(&mut self) -> Option<P<Stmt>> {
+ pub fn parse_stmt_panic(&mut self) -> Option<P<Stmt>> {
  panictry!(self.parse_stmt_nopanic())
  }
+ pub fn parse_attribute_panic(&mut self, permit_inner: bool) -> ast::Attribute {
+ panictry!(self.parse_attribute(permit_inner))
+ }
  /// Convert a token to a string using self's reader
  pub fn token_to_string(token: &token::Token) -> String {
  pprust::token_to_string(token)
@@ -1173,7 +1177,7 @@ impl<'a> Parser<'a> {
  seq_sep_none(),
  |p| -> PResult<P<TraitItem>> {
  maybe_whole!(no_clone p, NtTraitItem);
- let mut attrs = p.parse_outer_attributes();
+ let mut attrs = try!(p.parse_outer_attributes());
  let lo = p.span.lo;
  let (name, node) = if try!(p.eat_keyword(keywords::Type)) {
@@ -2961,7 +2965,7 @@ impl<'a> Parser<'a> {
  pub fn parse_arm_nopanic(&mut self) -> PResult<Arm> {
  maybe_whole!(no_clone self, NtArm);
- let attrs = self.parse_outer_attributes();
+ let attrs = try!(self.parse_outer_attributes());
  let pats = try!(self.parse_pats());
  let mut guard = None;
  if try!(self.eat_keyword(keywords::If) ){
@@ -3470,7 +3474,7 @@ impl<'a> Parser<'a> {
  }
  }
- let attrs = self.parse_outer_attributes();
+ let attrs = try!(self.parse_outer_attributes());
  let lo = self.span.lo;
  Ok(Some(if self.check_keyword(keywords::Let) {
@@ -3612,7 +3616,7 @@ impl<'a> Parser<'a> {
  let lo = self.span.lo;
  try!(self.expect(&token::OpenDelim(token::Brace)));
- Ok((self.parse_inner_attributes(),
+ Ok((try!(self.parse_inner_attributes()),
  try!(self.parse_block_tail(lo, DefaultBlock))))
  }
@@ -4436,7 +4440,7 @@ impl<'a> Parser<'a> {
  pub fn parse_impl_item(&mut self) -> PResult<P<ImplItem>> {
  maybe_whole!(no_clone self, NtImplItem);
- let mut attrs = self.parse_outer_attributes();
+ let mut attrs = try!(self.parse_outer_attributes());
  let lo = self.span.lo;
  let vis = try!(self.parse_visibility());
  let (name, node) = if try!(self.eat_keyword(keywords::Type)) {
@@ -4613,7 +4617,7 @@ impl<'a> Parser<'a> {
  generics.where_clause = try!(self.parse_where_clause());
  try!(self.expect(&token::OpenDelim(token::Brace)));
- let attrs = self.parse_inner_attributes();
+ let attrs = try!(self.parse_inner_attributes());
  let mut impl_items = vec![];
  while !try!(self.eat(&token::CloseDelim(token::Brace))) {
@@ -4732,7 +4736,7 @@ impl<'a> Parser<'a> {
  &token::CloseDelim(token::Paren),
  seq_sep_trailing_allowed(token::Comma),
  |p| {
- let attrs = p.parse_outer_attributes();
+ let attrs = try!(p.parse_outer_attributes());
  let lo = p.span.lo;
  let struct_field_ = ast::StructField_ {
  kind: UnnamedField(try!(p.parse_visibility())),
@@ -4774,7 +4778,7 @@ impl<'a> Parser<'a> {
  /// Parse an element of a struct definition
  fn parse_struct_decl_field(&mut self, allow_pub: bool) -> PResult<StructField> {
- let attrs = self.parse_outer_attributes();
+ let attrs = try!(self.parse_outer_attributes());
  if try!(self.eat_keyword(keywords::Pub) ){
  if !allow_pub {
@@ -4846,7 +4850,7 @@ impl<'a> Parser<'a> {
  let mod_inner_lo = self.span.lo;
  let old_owns_directory = self.owns_directory;
  self.owns_directory = true;
- let attrs = self.parse_inner_attributes();
+ let attrs = try!(self.parse_inner_attributes());
  let m = try!(self.parse_mod_items(&token::CloseDelim(token::Brace), mod_inner_lo));
  self.owns_directory = old_owns_directory;
  self.pop_mod_path();
@@ -4995,7 +4999,7 @@ impl<'a> Parser<'a> {
  Some(name),
  id_sp);
  let mod_inner_lo = p0.span.lo;
- let mod_attrs = p0.parse_inner_attributes();
+ let mod_attrs = try!(p0.parse_inner_attributes());
  let m0 = try!(p0.parse_mod_items(&token::Eof, mod_inner_lo));
  self.sess.included_mod_stack.borrow_mut().pop();
  Ok((ast::ItemMod(m0), mod_attrs))
@@ -5098,7 +5102,7 @@ impl<'a> Parser<'a> {
  let abi = opt_abi.unwrap_or(abi::C);
- attrs.extend(self.parse_inner_attributes());
+ attrs.extend(try!(self.parse_inner_attributes()));
  let mut foreign_items = vec![];
  while let Some(item) = try!(self.parse_foreign_item()) {
@@ -5148,7 +5152,7 @@ impl<'a> Parser<'a> {
  let mut all_nullary = true;
  let mut any_disr = None;
  while self.token != token::CloseDelim(token::Brace) {
- let variant_attrs = self.parse_outer_attributes();
+ let variant_attrs = try!(self.parse_outer_attributes());
  let vlo = self.span.lo;
  let struct_def;
@@ -5510,7 +5514,7 @@ impl<'a> Parser<'a> {
  /// Parse a foreign item.
  fn parse_foreign_item(&mut self) -> PResult<Option<P<ForeignItem>>> {
- let attrs = self.parse_outer_attributes();
+ let attrs = try!(self.parse_outer_attributes());
  let lo = self.span.lo;
  let visibility = try!(self.parse_visibility());
@@ -5610,7 +5614,7 @@ impl<'a> Parser<'a> {
  }
  pub fn parse_item_nopanic(&mut self) -> PResult<Option<P<Item>>> {
- let attrs = self.parse_outer_attributes();
+ let attrs = try!(self.parse_outer_attributes());
  self.parse_item_(attrs, true)
  }
@@ -5729,7 +5733,7 @@ impl<'a> Parser<'a> {
  pub fn parse_crate_mod(&mut self) -> PResult<Crate> {
  let lo = self.span.lo;
  Ok(ast::Crate {
- attrs: self.parse_inner_attributes(),
+ attrs: try!(self.parse_inner_attributes()),
  module: try!(self.parse_mod_items(&token::Eof, lo)),
  config: self.cfg.clone(),
  span: mk_sp(lo, self.span.lo),

View file

@@ -9,7 +9,7 @@
  // except according to those terms.
  use ast;
- use parse::{ParseSess,filemap_to_tts};
+ use parse::{ParseSess,PResult,filemap_to_tts};
  use parse::new_parser_from_source_str;
  use parse::parser::Parser;
  use parse::token;
@@ -31,11 +31,11 @@ pub fn string_to_parser<'a>(ps: &'a ParseSess, source_str: String) -> Parser<'a>
  }
  fn with_error_checking_parse<T, F>(s: String, f: F) -> T where
- F: FnOnce(&mut Parser) -> T,
+ F: FnOnce(&mut Parser) -> PResult<T>,
  {
  let ps = ParseSess::new();
  let mut p = string_to_parser(&ps, s);
- let x = f(&mut p);
+ let x = panictry!(f(&mut p));
  p.abort_if_errors();
  x
  }
@@ -43,39 +43,37 @@ fn with_error_checking_parse<T, F>(s: String, f: F) -> T where
  /// Parse a string, return a crate.
  pub fn string_to_crate (source_str : String) -> ast::Crate {
  with_error_checking_parse(source_str, |p| {
- panictry!(p.parse_crate_mod())
+ p.parse_crate_mod()
  })
  }
  /// Parse a string, return an expr
  pub fn string_to_expr (source_str : String) -> P<ast::Expr> {
  with_error_checking_parse(source_str, |p| {
- p.parse_expr()
+ p.parse_expr_nopanic()
  })
  }
  /// Parse a string, return an item
  pub fn string_to_item (source_str : String) -> Option<P<ast::Item>> {
  with_error_checking_parse(source_str, |p| {
- p.parse_item()
+ p.parse_item_nopanic()
  })
  }
  /// Parse a string, return a stmt
- pub fn string_to_stmt(source_str : String) -> P<ast::Stmt> {
+ pub fn string_to_stmt(source_str : String) -> Option<P<ast::Stmt>> {
  with_error_checking_parse(source_str, |p| {
- p.parse_stmt().unwrap()
+ p.parse_stmt_nopanic()
  })
  }
  /// Parse a string, return a pat. Uses "irrefutable"... which doesn't
  /// (currently) affect parsing.
  pub fn string_to_pat(source_str: String) -> P<ast::Pat> {
- // Binding `sess` and `parser` works around dropck-injected
- // region-inference issues; see #25212, #22323, #22321.
- let sess = ParseSess::new();
- let mut parser = string_to_parser(&sess, source_str);
- parser.parse_pat()
+ with_error_checking_parse(source_str, |p| {
+ p.parse_pat_nopanic()
+ })
  }
  /// Convert a vector of strings to a vector of ast::Ident's

View file

@@ -54,7 +54,7 @@ fn expand_identity(cx: &mut ExtCtxt, _span: Span, tts: &[TokenTree])
  // Parse an expression and emit it unchanged.
  let mut parser = parse::new_parser_from_tts(cx.parse_sess(),
  cx.cfg(), tts.to_vec());
- let expr = parser.parse_expr();
+ let expr = parser.parse_expr_panic();
  MacEager::expr(quote_expr!(&mut *cx, $expr))
  }