auto merge of #5120 : jbclements/rust/macros-have-scope, r=pcwalton
r? After this patch, macros declared in a module, function, or block can only be used inside that module, function, or block. The exception is modules declared with the #[macro_escape] attribute: these modules allow macros to escape, and they can be used as a limited macro-export mechanism. This pull request also includes miscellaneous comments, many new test cases, a few renamings, and a few as-yet-unused data definitions for hygiene.
This commit is contained in:
commit 99a902c81d
21 changed files with 646 additions and 196 deletions
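Editorial note, not part of the patch: to make the scoping rule above concrete, here is a small sketch in present-day Rust. `#[macro_use]` plays roughly the role `#[macro_escape]` played at the time, and all names are illustrative.

#[macro_use]
mod escaping {
    // a macro that is allowed to escape its module
    macro_rules! shared { () => { 2 } }
}

mod scoped {
    // declared inside `scoped`, usable only from this point on inside `scoped`
    macro_rules! private_only { () => { 1 } }
    pub fn ok() -> u32 { private_only!() }
}

fn main() {
    // `private_only!()` would not resolve here; `shared!()` does.
    println!("{}", scoped::ok() + shared!());
}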
@@ -186,12 +186,9 @@ fn SipState(key0: u64, key1: u64) -> SipState {
     state
 }
 
-impl io::Writer for SipState {
-    // Methods for io::writer
-    #[inline(always)]
-    fn write(&self, msg: &[const u8]) {
+// sadly, these macro definitions can't appear later,
+// because they're needed in the following defs;
+// this design could be improved.
 
 macro_rules! u8to64_le (
     ($buf:expr, $i:expr) =>
@@ -203,12 +200,12 @@ impl io::Writer for SipState {
         $buf[5+$i] as u64 << 40 |
         $buf[6+$i] as u64 << 48 |
         $buf[7+$i] as u64 << 56)
-);
+)
 
 macro_rules! rotl (
     ($x:expr, $b:expr) =>
     (($x << $b) | ($x >> (64 - $b)))
-);
+)
 
 macro_rules! compress (
     ($v0:expr, $v1:expr, $v2:expr, $v3:expr) =>
@@ -220,7 +217,14 @@ impl io::Writer for SipState {
         $v2 += $v1; $v1 = rotl!($v1, 17); $v1 ^= $v2;
         $v2 = rotl!($v2, 32);
     })
-);
+)
+
+impl io::Writer for SipState {
+    // Methods for io::writer
+    #[inline(always)]
+    fn write(&self, msg: &[const u8]) {
 
         let length = msg.len();
         self.length += length;
@@ -172,11 +172,6 @@ use syntax::ast_util;
 use syntax::codemap::span;
 use syntax::print::pprust::pat_to_str;
 
-pub fn macros() {
-    // FIXME(#3114): Macro import/export.
-    include!("macros.rs");
-}
-
 // An option identifying a literal: either a unit-like struct or an
 // expression.
 pub enum Lit {
@@ -18,11 +18,6 @@ use middle::trans::datum::*;
 use core::str;
 
-pub fn macros() {
-    // FIXME(#3114): Macro import/export.
-    include!("macros.rs");
-}
-
 pub fn trans_block(bcx: block, b: &ast::blk, dest: expr::Dest) -> block {
     let _icx = bcx.insn_ctxt("trans_block");
     let mut bcx = bcx;
@@ -149,8 +149,6 @@ use syntax::codemap::spanned;
 // These are passed around by the code generating functions to track the
 // destination of a computation's value.
 
-fn macros() { include!("macros.rs"); } // FIXME(#3114): Macro import/export.
-
 pub enum Dest {
     SaveIn(ValueRef),
     Ignore,
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-{
+#[macro_escape];
 
 macro_rules! unpack_datum(
     ($bcx: ident, $inp: expr) => (
@@ -18,7 +18,7 @@ macro_rules! unpack_datum(
             db.datum
         }
     )
-);
+)
 
 macro_rules! unpack_result(
     ($bcx: ident, $inp: expr) => (
@@ -28,7 +28,7 @@ macro_rules! unpack_result(
             db.val
         }
     )
-);
+)
 
 macro_rules! trace_span(
     ($bcx: ident, $sp: expr, $str: expr) => (
@@ -39,7 +39,7 @@ macro_rules! trace_span(
         }
         }
     )
-);
+)
 
 macro_rules! trace(
     ($bcx: ident, $str: expr) => (
@@ -50,6 +50,5 @@ macro_rules! trace(
         }
         }
     )
-);
+)
 
-}
@@ -39,11 +39,6 @@ use syntax::ast_util::local_def;
 use syntax::print::pprust::expr_to_str;
 use syntax::{ast, ast_map};
 
-pub fn macros() {
-    // FIXME(#3114): Macro import/export.
-    include!("macros.rs");
-}
-
 /**
 The main "translation" pass for methods. Generates code
 for non-monomorphized methods only. Other methods will
@@ -72,11 +72,6 @@ use syntax::ast::{Onceness, purity, ret_style};
 use syntax::ast;
 use syntax::codemap::span;
 
-pub fn macros() {
-    // FIXME(#3114): Macro import/export.
-    include!("macros.rs");
-}
-
 pub trait Combine {
     fn infcx(&self) -> @mut InferCtxt;
     fn tag(&self) -> ~str;
@@ -24,11 +24,6 @@ use std::list;
 use syntax::ast::{Many, Once, extern_fn, m_const, impure_fn, noreturn};
 use syntax::ast::{pure_fn, ret_style, return_val, unsafe_fn};
 
-pub fn macros() {
-    // FIXME(#3114): Macro import/export.
-    include!("macros.rs");
-}
-
 pub enum Lub = CombineFields; // least-upper-bound: common supertype
 
 pub impl Lub {
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-{
+#[macro_escape];
 
 macro_rules! if_ok(
     ($inp: expr) => (
@@ -17,6 +17,5 @@ macro_rules! if_ok(
             Err(e) => { return Err(e); }
         }
     )
-);
+)
 
-}
@@ -287,6 +287,7 @@ use syntax::codemap;
 use syntax::ast_util;
 use syntax::codemap::span;
 
+pub mod macros;
 pub mod combine;
 pub mod glb;
 pub mod lattice;
@@ -25,10 +25,6 @@ use std::list::Nil;
 use std::list;
 use syntax::ast::{m_const, purity, ret_style};
 
-pub fn macros() {
-    // FIXME(#3114): Macro import/export.
-    include!("macros.rs");
-}
-
 pub enum Sub = CombineFields; // "subtype", "subregion" etc
 
@@ -49,6 +49,7 @@ use back_ = back;
 
 pub mod middle {
     pub mod trans {
+        pub mod macros;
         pub mod inline;
        pub mod monomorphize;
        pub mod controlflow;
@@ -29,8 +29,37 @@ macro_rules! interner_key (
     (-3 as uint, 0u)))
 )
 
+// an identifier contains an index into the interner
+// table and a SyntaxContext to track renaming and
+// macro expansion per Flatt et al., "Macros
+// That Work Together"
 #[deriving_eq]
-pub struct ident { repr: uint }
+pub struct ident { repr: Name }
 
+// a SyntaxContext represents a chain of macro-expandings
+// and renamings. Each macro expansion corresponds to
+// a fresh uint
+#[deriving_eq]
+pub enum SyntaxContext {
+    MT,
+    Mark (Mrk,~SyntaxContext),
+    Rename (~ident,Name,~SyntaxContext)
+}
+
+/*
+// ** this is going to have to apply to paths, not to idents.
+// Returns true if these two identifiers access the same
+// local binding or top-level binding... that's what it
+// should do. For now, it just compares the names.
+pub fn free_ident_eq (a : ident, b: ident) -> bool{
+    a.repr == b.repr
+}
+*/
+// a name represents a string, interned
+type Name = uint;
+// a mark represents a unique id associated
+// with a macro expansion
+type Mrk = uint;
+
 pub impl<S:Encoder> Encodable<S> for ident {
     fn encode(&self, s: &S) {
@@ -1230,6 +1259,7 @@ pub enum item_ {
              Option<@trait_ref>, // (optional) trait this impl implements
              @Ty, // self
              ~[@method]),
+    // a macro invocation (which includes macro definition)
     item_mac(mac),
 }
 
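Editorial note, not part of the patch: the `ident`/`SyntaxContext` definitions above are the as-yet-unused hygiene data the pull-request description mentions. A minimal sketch of the same chain structure in present-day Rust, with illustrative names and types, might look like this:

type Name = usize; // an interned string
type Mrk = usize;  // a unique id for one macro expansion

#[derive(Debug, Clone, PartialEq)]
enum SyntaxContext {
    Empty,                                  // "MT" in the patch
    Mark(Mrk, Box<SyntaxContext>),          // one more expansion layer
    Rename(Name, Name, Box<SyntaxContext>), // (old name, fresh name, rest)
}

fn main() {
    // an identifier that has been through two expansions and one renaming:
    let ctxt = SyntaxContext::Rename(
        7,
        42,
        Box::new(SyntaxContext::Mark(
            1,
            Box::new(SyntaxContext::Mark(0, Box::new(SyntaxContext::Empty))),
        )),
    );
    println!("{:?}", ctxt);
}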
@@ -1173,6 +1173,8 @@ mod test {
         CallToEmitEnumVariantArg(uint),
         CallToEmitUint(uint),
         CallToEmitNil,
+        CallToEmitStruct(~str,uint),
+        CallToEmitField(~str,uint),
         // all of the ones I was too lazy to handle:
         CallToOther
     }
@@ -1251,11 +1253,11 @@ mod test {
         fn emit_rec(&self, f: fn()) {
             self.add_unknown_to_log(); f();
         }
-        fn emit_struct(&self, _name: &str, +_len: uint, f: fn()) {
-            self.add_unknown_to_log(); f();
+        fn emit_struct(&self, name: &str, +len: uint, f: fn()) {
+            self.add_to_log(CallToEmitStruct (name.to_str(),len)); f();
         }
-        fn emit_field(&self, _name: &str, +_idx: uint, f: fn()) {
-            self.add_unknown_to_log(); f();
+        fn emit_field(&self, name: &str, +idx: uint, f: fn()) {
+            self.add_to_log(CallToEmitField (name.to_str(),idx)); f();
         }
 
         fn emit_tup(&self, +_len: uint, f: fn()) {
@@ -1267,23 +1269,12 @@ mod test {
     }
 
 
-    #[auto_decode]
-    #[auto_encode]
-    struct Node {id: uint}
-
     fn to_call_log (val: Encodable<TestEncoder>) -> ~[call] {
         let mut te = TestEncoder {call_log: @mut ~[]};
         val.encode(&te);
         copy *te.call_log
     }
-    /*
-    #[test] fn encode_test () {
-        check_equal (to_call_log(Node{id:34}
-                                 as Encodable::<std::json::Encoder>),
-                     ~[CallToEnum (~"Node"),
-                       CallToEnumVariant]);
-    }
-    */
     #[auto_encode]
     enum Written {
         Book(uint,uint),
@@ -1300,4 +1291,17 @@ mod test {
                        CallToEmitEnumVariantArg (1),
                        CallToEmitUint (44)]);
     }
 
+    pub enum BPos = uint;
+
+    #[auto_encode]
+    pub struct HasPos { pos : BPos }
+
+    #[test] fn encode_newtype_test () {
+        check_equal (to_call_log (HasPos {pos:BPos(48)}
+                                  as Encodable::<TestEncoder>),
+                     ~[CallToEmitStruct(~"HasPos",1),
+                       CallToEmitField(~"pos",0),
+                       CallToEmitUint(48)]);
+    }
 }
@@ -21,12 +21,12 @@ use parse::{parser, token};
 
 use core::io;
 use core::vec;
-use std::oldmap::HashMap;
+use core::hashmap::linear::LinearMap;
 
 // new-style macro! tt code:
 //
 //    SyntaxExpanderTT, SyntaxExpanderTTItem, MacResult,
-//    NormalTT, ItemTT
+//    NormalTT, IdentTT
 //
 // also note that ast::mac used to have a bunch of extraneous cases and
 // is now probably a redundant AST node, can be merged with
@@ -71,25 +71,55 @@ pub enum SyntaxExtension {
     // Token-tree expanders
     NormalTT(SyntaxExpanderTT),
 
+    // An IdentTT is a macro that has an
+    // identifier in between the name of the
+    // macro and the argument. Currently,
+    // the only examples of this are
+    // macro_rules! and proto!
+
     // perhaps macro_rules! will lose its odd special identifier argument,
     // and this can go away also
-    ItemTT(SyntaxExpanderTTItem),
+    IdentTT(SyntaxExpanderTTItem),
 }
 
-type SyntaxExtensions = HashMap<@~str, SyntaxExtension>;
+type SyntaxEnv = @mut MapChain<Name, Transformer>;
 
-// A temporary hard-coded map of methods for expanding syntax extension
+// Name : the domain of SyntaxEnvs
+// want to change these to uints....
+// note that we use certain strings that are not legal as identifiers
+// to indicate, for instance, how blocks are supposed to behave.
+type Name = @~str;
+
+// Transformer : the codomain of SyntaxEnvs
+
+// NB: it may seem crazy to lump both of these into one environment;
+// what would it mean to bind "foo" to BlockLimit(true)? The idea
+// is that this follows the lead of MTWT, and accommodates growth
+// toward a more uniform syntax syntax (sorry) where blocks are just
+// another kind of transformer.
+
+enum Transformer {
+    // this identifier maps to a syntax extension or macro
+    SE(SyntaxExtension),
+    // should blocks occurring here limit macro scopes?
+    ScopeMacros(bool)
+}
+
+// The base map of methods for expanding syntax extension
 // AST nodes into full ASTs
-pub fn syntax_expander_table() -> SyntaxExtensions {
+pub fn syntax_expander_table() -> SyntaxEnv {
     // utility function to simplify creating NormalTT syntax extensions
-    fn builtin_normal_tt(f: SyntaxExpanderTTFun) -> SyntaxExtension {
-        NormalTT(SyntaxExpanderTT{expander: f, span: None})
+    fn builtin_normal_tt(f: SyntaxExpanderTTFun) -> @Transformer {
+        @SE(NormalTT(SyntaxExpanderTT{expander: f, span: None}))
     }
-    // utility function to simplify creating ItemTT syntax extensions
-    fn builtin_item_tt(f: SyntaxExpanderTTItemFun) -> SyntaxExtension {
-        ItemTT(SyntaxExpanderTTItem{expander: f, span: None})
+    // utility function to simplify creating IdentTT syntax extensions
+    fn builtin_item_tt(f: SyntaxExpanderTTItemFun) -> @Transformer {
+        @SE(IdentTT(SyntaxExpanderTTItem{expander: f, span: None}))
     }
-    let syntax_expanders = HashMap();
+    let mut syntax_expanders = LinearMap::new();
+    // NB identifier starts with space, and can't conflict with legal idents
+    syntax_expanders.insert(@~" block",
+                            @ScopeMacros(true));
     syntax_expanders.insert(@~"macro_rules",
                             builtin_item_tt(
                                 ext::tt::macro_rules::add_new_extension));
@@ -97,10 +127,10 @@ pub fn syntax_expander_table() -> SyntaxExtensions {
                             builtin_normal_tt(ext::fmt::expand_syntax_ext));
     syntax_expanders.insert(
         @~"auto_encode",
-        ItemDecorator(ext::auto_encode::expand_auto_encode));
+        @SE(ItemDecorator(ext::auto_encode::expand_auto_encode)));
     syntax_expanders.insert(
         @~"auto_decode",
-        ItemDecorator(ext::auto_encode::expand_auto_decode));
+        @SE(ItemDecorator(ext::auto_encode::expand_auto_decode)));
     syntax_expanders.insert(@~"env",
                             builtin_normal_tt(ext::env::expand_syntax_ext));
     syntax_expanders.insert(@~"concat_idents",
@@ -110,11 +140,11 @@ pub fn syntax_expander_table() -> SyntaxExtensions {
                             builtin_normal_tt(
                                 ext::log_syntax::expand_syntax_ext));
     syntax_expanders.insert(@~"deriving_eq",
-                            ItemDecorator(
-                                ext::deriving::expand_deriving_eq));
+                            @SE(ItemDecorator(
+                                ext::deriving::expand_deriving_eq)));
     syntax_expanders.insert(@~"deriving_iter_bytes",
-                            ItemDecorator(
-                                ext::deriving::expand_deriving_iter_bytes));
+                            @SE(ItemDecorator(
+                                ext::deriving::expand_deriving_iter_bytes)));
 
     // Quasi-quoting expanders
     syntax_expanders.insert(@~"quote_tokens",
@@ -159,7 +189,7 @@ pub fn syntax_expander_table() -> SyntaxExtensions {
     syntax_expanders.insert(
         @~"trace_macros",
         builtin_normal_tt(ext::trace_macros::expand_trace_macros));
-    return syntax_expanders;
+    MapChain::new(~syntax_expanders)
 }
 
 // One of these is made during expansion and incrementally updated as we go;
@@ -348,6 +378,149 @@ pub fn get_exprs_from_tts(cx: ext_ctxt, tts: ~[ast::token_tree])
     es
 }
 
+// in order to have some notion of scoping for macros,
+// we want to implement the notion of a transformation
+// environment.
+
+// This environment maps Names to Transformers.
+// Initially, this includes macro definitions and
+// block directives.
+
+// Actually, the following implementation is parameterized
+// by both key and value types.
+
+//impl question: how to implement it? Initially, the
+// env will contain only macros, so it might be painful
+// to add an empty frame for every context. Let's just
+// get it working, first....
+
+// NB! the mutability of the underlying maps means that
+// if expansion is out-of-order, a deeper scope may be
+// able to refer to a macro that was added to an enclosing
+// scope lexically later than the deeper scope.
+
+// Note on choice of representation: I've been pushed to
+// use a top-level managed pointer by some difficulties
+// with pushing and popping functionally, and the ownership
+// issues. As a result, the values returned by the table
+// also need to be managed; the &self/... type that Maps
+// return won't work for things that need to get outside
+// of that managed pointer. The easiest way to do this
+// is just to insist that the values in the tables are
+// managed to begin with.
+
+// a transformer env is either a base map or a map on top
+// of another chain.
+pub enum MapChain<K,V> {
+    BaseMapChain(~LinearMap<K,@V>),
+    ConsMapChain(~LinearMap<K,@V>,@mut MapChain<K,V>)
+}
+
+// get the map from an env frame
+impl <K: Eq + Hash + IterBytes ,V: Copy> MapChain<K,V>{
+
+    // Constructor. I don't think we need a zero-arg one.
+    static fn new(+init: ~LinearMap<K,@V>) -> @mut MapChain<K,V> {
+        @mut BaseMapChain(init)
+    }
+
+    // add a new frame to the environment (functionally)
+    fn push_frame (@mut self) -> @mut MapChain<K,V> {
+        @mut ConsMapChain(~LinearMap::new() ,self)
+    }
+
+    // no need for pop, it'll just be functional.
+
+    // utility fn...
+
+    // ugh: can't get this to compile with mut because of the
+    // lack of flow sensitivity.
+    fn get_map(&self) -> &self/LinearMap<K,@V> {
+        match *self {
+            BaseMapChain (~ref map) => map,
+            ConsMapChain (~ref map,_) => map
+        }
+    }
+
+    // traits just don't work anywhere...?
+    //pub impl Map<Name,SyntaxExtension> for MapChain {
+
+    pure fn contains_key (&self, key: &K) -> bool {
+        match *self {
+            BaseMapChain (ref map) => map.contains_key(key),
+            ConsMapChain (ref map,ref rest) =>
+                (map.contains_key(key)
+                 || rest.contains_key(key))
+        }
+    }
+    // should each_key and each_value operate on shadowed
+    // names? I think not.
+    // delaying implementing this....
+    pure fn each_key (&self, _f: &fn (&K)->bool) {
+        fail!(~"unimplemented 2013-02-15T10:01");
+    }
+
+    pure fn each_value (&self, _f: &fn (&V) -> bool) {
+        fail!(~"unimplemented 2013-02-15T10:02");
+    }
+
+    // Returns a copy of the value that the name maps to.
+    // Goes down the chain 'til it finds one (or bottom out).
+    fn find (&self, key: &K) -> Option<@V> {
+        match self.get_map().find (key) {
+            Some(ref v) => Some(**v),
+            None => match *self {
+                BaseMapChain (_) => None,
+                ConsMapChain (_,ref rest) => rest.find(key)
+            }
+        }
+    }
+
+    // insert the binding into the top-level map
+    fn insert (&mut self, +key: K, +ext: @V) -> bool {
+        // can't abstract over get_map because of flow sensitivity...
+        match *self {
+            BaseMapChain (~ref mut map) => map.insert(key, ext),
+            ConsMapChain (~ref mut map,_) => map.insert(key,ext)
+        }
+    }
+
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+    use super::MapChain;
+    use util::testing::check_equal;
+
+    #[test] fn testenv () {
+        let mut a = LinearMap::new();
+        a.insert (@~"abc",@15);
+        let m = MapChain::new(~a);
+        m.insert (@~"def",@16);
+        // FIXME: #4492 (ICE) check_equal(m.find(&@~"abc"),Some(@15));
+        // .... check_equal(m.find(&@~"def"),Some(@16));
+        check_equal(*(m.find(&@~"abc").get()),15);
+        check_equal(*(m.find(&@~"def").get()),16);
+        let n = m.push_frame();
+        // old bindings are still present:
+        check_equal(*(n.find(&@~"abc").get()),15);
+        check_equal(*(n.find(&@~"def").get()),16);
+        n.insert (@~"def",@17);
+        // n shows the new binding
+        check_equal(*(n.find(&@~"abc").get()),15);
+        check_equal(*(n.find(&@~"def").get()),17);
+        // ... but m still has the old ones
+        // FIXME: #4492: check_equal(m.find(&@~"abc"),Some(@15));
+        // FIXME: #4492: check_equal(m.find(&@~"def"),Some(@16));
+        check_equal(*(m.find(&@~"abc").get()),15);
+        check_equal(*(m.find(&@~"def").get()),16);
+    }
+}
+
 //
 // Local Variables:
 // mode: rust
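Editorial note, not part of the patch: `MapChain` above is essentially a linked list of scope frames. Lookups walk outward from the innermost frame, while inserts go into the innermost frame only. A self-contained present-day Rust sketch of the same idea, with illustrative names and simplified to a vector of frames:

use std::collections::HashMap;

struct ScopeChain<V> {
    frames: Vec<HashMap<String, V>>, // the innermost frame is last
}

impl<V> ScopeChain<V> {
    fn new() -> Self {
        ScopeChain { frames: vec![HashMap::new()] }
    }
    fn push_frame(&mut self) {
        self.frames.push(HashMap::new());
    }
    fn pop_frame(&mut self) {
        self.frames.pop();
    }
    fn insert(&mut self, key: &str, val: V) {
        self.frames.last_mut().unwrap().insert(key.to_string(), val);
    }
    fn find(&self, key: &str) -> Option<&V> {
        // walk from the innermost frame outward
        self.frames.iter().rev().find_map(|frame| frame.get(key))
    }
}

fn main() {
    let mut env = ScopeChain::new();
    env.insert("abc", 15);
    env.push_frame();
    env.insert("def", 17);
    assert_eq!(env.find("abc"), Some(&15)); // outer bindings stay visible
    env.pop_frame();
    assert_eq!(env.find("def"), None);      // inner bindings do not escape
    println!("scope-chain sketch ok");
}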
@@ -17,13 +17,13 @@ use attr;
 use codemap::{span, CallInfo, ExpandedFrom, NameAndSpan};
 use ext::base::*;
 use fold::*;
-use parse::{parser, parse_expr_from_source_str, new_parser_from_tts};
+use parse::{parser, parse_item_from_source_str, new_parser_from_tts};
 
 use core::option;
 use core::vec;
-use std::oldmap::HashMap;
+use core::hashmap::LinearMap;
 
-pub fn expand_expr(exts: SyntaxExtensions, cx: ext_ctxt,
+pub fn expand_expr(extsbox: @mut SyntaxEnv, cx: ext_ctxt,
                    e: expr_, s: span, fld: ast_fold,
                    orig: fn@(expr_, span, ast_fold) -> (expr_, span))
                 -> (expr_, span) {
@@ -41,13 +41,14 @@ pub fn expand_expr(exts: SyntaxExtensions, cx: ext_ctxt,
             /* using idents and token::special_idents would make the
             the macro names be hygienic */
             let extname = cx.parse_sess().interner.get(pth.idents[0]);
-            match exts.find(&extname) {
+            // leaving explicit deref here to highlight unbox op:
+            match (*extsbox).find(&extname) {
                 None => {
                     cx.span_fatal(pth.span,
                                   fmt!("macro undefined: '%s'", *extname))
                 }
-                Some(NormalTT(SyntaxExpanderTT{expander: exp,
-                                               span: exp_sp})) => {
+                Some(@SE(NormalTT(SyntaxExpanderTT{expander: exp,
+                                                   span: exp_sp}))) => {
                     cx.bt_push(ExpandedFrom(CallInfo{
                         call_site: s,
                         callee: NameAndSpan {
@@ -92,7 +93,7 @@ pub fn expand_expr(exts: SyntaxExtensions, cx: ext_ctxt,
 //
 // NB: there is some redundancy between this and expand_item, below, and
 // they might benefit from some amount of semantic and language-UI merger.
-pub fn expand_mod_items(exts: SyntaxExtensions, cx: ext_ctxt,
+pub fn expand_mod_items(extsbox: @mut SyntaxEnv, cx: ext_ctxt,
                         module_: ast::_mod, fld: ast_fold,
                         orig: fn@(ast::_mod, ast_fold) -> ast::_mod)
                      -> ast::_mod {
@@ -106,9 +107,8 @@ pub fn expand_mod_items(exts: SyntaxExtensions, cx: ext_ctxt,
         do vec::foldr(item.attrs, ~[*item]) |attr, items| {
             let mname = attr::get_attr_name(attr);
 
-            match exts.find(&mname) {
-                None | Some(NormalTT(_)) | Some(ItemTT(*)) => items,
-                Some(ItemDecorator(dec_fn)) => {
+            match (*extsbox).find(&mname) {
+                Some(@SE(ItemDecorator(dec_fn))) => {
                     cx.bt_push(ExpandedFrom(CallInfo {
                         call_site: attr.span,
                         callee: NameAndSpan {
@@ -119,7 +119,8 @@ pub fn expand_mod_items(exts: SyntaxExtensions, cx: ext_ctxt,
                     let r = dec_fn(cx, attr.span, attr.node.value, items);
                     cx.bt_pop();
                     r
-                }
+                },
+                _ => items,
             }
         }
     };
@@ -128,34 +129,94 @@ pub fn expand_mod_items(exts: SyntaxExtensions, cx: ext_ctxt,
 }
 
+// eval $e with a new exts frame:
+macro_rules! with_exts_frame (
+    ($extsboxexpr:expr,$e:expr) =>
+    ({let extsbox = $extsboxexpr;
+      let oldexts = *extsbox;
+      *extsbox = oldexts.push_frame();
+      let result = $e;
+      *extsbox = oldexts;
+      result
+     })
+)
+
 // When we enter a module, record it, for the sake of `module!`
-pub fn expand_item(exts: SyntaxExtensions,
+pub fn expand_item(extsbox: @mut SyntaxEnv,
                    cx: ext_ctxt, &&it: @ast::item, fld: ast_fold,
                    orig: fn@(&&v: @ast::item, ast_fold) -> Option<@ast::item>)
                 -> Option<@ast::item> {
-    let is_mod = match it.node {
-        ast::item_mod(_) | ast::item_foreign_mod(_) => true,
-        _ => false
-    };
+    // need to do expansion first... it might turn out to be a module.
     let maybe_it = match it.node {
-        ast::item_mac(*) => expand_item_mac(exts, cx, it, fld),
+        ast::item_mac(*) => expand_item_mac(extsbox, cx, it, fld),
         _ => Some(it)
     };
 
     match maybe_it {
         Some(it) => {
-            if is_mod { cx.mod_push(it.ident); }
-            let ret_val = orig(it, fld);
-            if is_mod { cx.mod_pop(); }
-            return ret_val;
+            match it.node {
+                ast::item_mod(_) | ast::item_foreign_mod(_) => {
+                    cx.mod_push(it.ident);
+                    let result =
+                        // don't push a macro scope for macro_escape:
+                        if contains_macro_escape(it.attrs) {
+                            orig(it,fld)
+                        } else {
+                            // otherwise, push a scope:
+                            with_exts_frame!(extsbox,orig(it,fld))
+                        };
+                    cx.mod_pop();
+                    result
+                }
+                _ => orig(it,fld)
+            }
         }
-        None => return None
+        None => None
     }
 }
 
+// does this attribute list contain "macro_escape" ?
+fn contains_macro_escape (attrs: &[ast::attribute]) -> bool{
+    let mut accum = false;
+    do attrs.each |attr| {
+        let mname = attr::get_attr_name(attr);
+        if (mname == @~"macro_escape") {
+            accum = true;
+            false
+        } else {
+            true
+        }
+    }
+    accum
+}
+
+// this macro disables (one layer of) macro
+// scoping, to allow a block to add macro bindings
+// to its parent env
+macro_rules! without_macro_scoping(
+    ($extsexpr:expr,$exp:expr) =>
+    ({
+        // only evaluate this once:
+        let exts = $extsexpr;
+        // capture the existing binding:
+        let existingBlockBinding =
+            match exts.find(&@~" block"){
+                Some(binding) => binding,
+                None => cx.bug("expected to find \" block\" binding")
+            };
+        // this prevents the block from limiting the macros' scope:
+        exts.insert(@~" block",@ScopeMacros(false));
+        let result = $exp;
+        // reset the block binding. Note that since the original
+        // one may have been inherited, this procedure may wind
+        // up introducing a block binding where one didn't exist
+        // before.
+        exts.insert(@~" block",existingBlockBinding);
+        result
+    }))
+
 // Support for item-position macro invocations, exactly the same
 // logic as for expression-position macro invocations.
-pub fn expand_item_mac(exts: SyntaxExtensions,
+pub fn expand_item_mac(+extsbox: @mut SyntaxEnv,
                        cx: ext_ctxt, &&it: @ast::item,
                        fld: ast_fold) -> Option<@ast::item> {
 
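Editorial note, not part of the patch: the `expand_item` logic above reduces to "push a macro scope around a module unless it carries `#[macro_escape]`". A tiny present-day Rust sketch of that decision, with a hypothetical attribute list standing in for `ast::attribute`:

fn contains_macro_escape(attrs: &[&str]) -> bool {
    attrs.iter().any(|a| *a == "macro_escape")
}

fn expand_module(name: &str, attrs: &[&str]) {
    if contains_macro_escape(attrs) {
        // expand in the parent scope: macros defined here escape the module
        println!("{name}: expanding without a new macro scope");
    } else {
        // push a fresh frame: macro definitions stay local to the module
        println!("{name}: expanding inside a new macro scope");
    }
}

fn main() {
    expand_module("escaping", &["macro_escape"]);
    expand_module("scoped", &["inline"]);
}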
@@ -167,11 +228,11 @@ pub fn expand_item_mac(exts: SyntaxExtensions,
     };
 
     let extname = cx.parse_sess().interner.get(pth.idents[0]);
-    let expanded = match exts.find(&extname) {
+    let expanded = match (*extsbox).find(&extname) {
         None => cx.span_fatal(pth.span,
                               fmt!("macro undefined: '%s!'", *extname)),
 
-        Some(NormalTT(ref expand)) => {
+        Some(@SE(NormalTT(ref expand))) => {
             if it.ident != parse::token::special_idents::invalid {
                 cx.span_fatal(pth.span,
                               fmt!("macro %s! expects no ident argument, \
@@ -187,7 +248,7 @@ pub fn expand_item_mac(exts: SyntaxExtensions,
             }));
             ((*expand).expander)(cx, it.span, tts)
         }
-        Some(ItemTT(ref expand)) => {
+        Some(@SE(IdentTT(ref expand))) => {
             if it.ident == parse::token::special_idents::invalid {
                 cx.span_fatal(pth.span,
                               fmt!("macro %s! expects an ident argument",
@@ -214,7 +275,7 @@ pub fn expand_item_mac(exts: SyntaxExtensions,
         MRAny(_, item_maker, _) =>
             option::chain(item_maker(), |i| {fld.fold_item(i)}),
         MRDef(ref mdef) => {
-            exts.insert(@/*bad*/ copy mdef.name, (*mdef).ext);
+            extsbox.insert(@/*bad*/ copy mdef.name, @SE((*mdef).ext));
             None
         }
     };
@@ -222,7 +283,8 @@ pub fn expand_item_mac(exts: SyntaxExtensions,
     return maybe_it;
 }
 
-pub fn expand_stmt(exts: SyntaxExtensions, cx: ext_ctxt,
+// expand a stmt
+pub fn expand_stmt(extsbox: @mut SyntaxEnv, cx: ext_ctxt,
                    && s: stmt_, sp: span, fld: ast_fold,
                    orig: fn@(&&s: stmt_, span, ast_fold) -> (stmt_, span))
                 -> (stmt_, span) {
@@ -238,12 +300,12 @@ pub fn expand_stmt(exts: SyntaxExtensions, cx: ext_ctxt,
 
     assert(vec::len(pth.idents) == 1u);
     let extname = cx.parse_sess().interner.get(pth.idents[0]);
-    let (fully_expanded, sp) = match exts.find(&extname) {
+    let (fully_expanded, sp) = match (*extsbox).find(&extname) {
         None =>
             cx.span_fatal(pth.span, fmt!("macro undefined: '%s'", *extname)),
 
-        Some(NormalTT(
-            SyntaxExpanderTT{expander: exp, span: exp_sp})) => {
+        Some(@SE(NormalTT(
+            SyntaxExpanderTT{expander: exp, span: exp_sp}))) => {
             cx.bt_push(ExpandedFrom(CallInfo {
                 call_site: sp,
                 callee: NameAndSpan { name: *extname, span: exp_sp }
@@ -271,7 +333,7 @@ pub fn expand_stmt(exts: SyntaxExtensions, cx: ext_ctxt,
         }
     };
 
-    return (match fully_expanded {
+    (match fully_expanded {
         stmt_expr(e, stmt_id) if semi => stmt_semi(e, stmt_id),
         _ => { fully_expanded } /* might already have a semi */
     }, sp)
@@ -279,19 +341,39 @@ pub fn expand_stmt(exts: SyntaxExtensions, cx: ext_ctxt,
 }
 
+pub fn expand_block(extsbox: @mut SyntaxEnv, cx: ext_ctxt,
+                    && blk: blk_, sp: span, fld: ast_fold,
+                    orig: fn@(&&s: blk_, span, ast_fold) -> (blk_, span))
+                 -> (blk_, span) {
+    match (*extsbox).find(&@~" block") {
+        // no scope limit on macros in this block, no need
+        // to push an exts frame:
+        Some(@ScopeMacros(false)) => {
+            orig (blk,sp,fld)
+        },
+        // this block should limit the scope of its macros:
+        Some(@ScopeMacros(true)) => {
+            // see note below about treatment of exts table
+            with_exts_frame!(extsbox,orig(blk,sp,fld))
+        },
+        _ => cx.span_bug(sp,
+                         ~"expected ScopeMacros binding for \" block\"")
+    }
+}
+
 pub fn new_span(cx: ext_ctxt, sp: span) -> span {
     /* this discards information in the case of macro-defining macros */
     return span {lo: sp.lo, hi: sp.hi, expn_info: cx.backtrace()};
 }
 
-// FIXME (#2247): this is a terrible kludge to inject some macros into
-// the default compilation environment. When the macro-definition system
-// is substantially more mature, these should move from here, into a
-// compiled part of libcore at very least.
+// FIXME (#2247): this is a moderately bad kludge to inject some macros into
+// the default compilation environment. It would be much nicer to use
+// a mechanism like syntax_quote to ensure hygiene.
 
 pub fn core_macros() -> ~str {
     return
-~"{
+~"pub mod macros {
     macro_rules! ignore (($($x:tt)*) => (()))
 
     macro_rules! error ( ($( $arg:expr ),+) => (
@@ -341,29 +423,160 @@ pub fn core_macros() -> ~str {
 
 pub fn expand_crate(parse_sess: @mut parse::ParseSess,
                     cfg: ast::crate_cfg, c: @crate) -> @crate {
-    let exts = syntax_expander_table();
+    // adding *another* layer of indirection here so that the block
+    // visitor can swap out one exts table for another for the duration
+    // of the block. The cleaner alternative would be to thread the
+    // exts table through the fold, but that would require updating
+    // every method/element of AstFoldFns in fold.rs.
+    let extsbox = @mut syntax_expander_table();
     let afp = default_ast_fold();
     let cx: ext_ctxt = mk_ctxt(parse_sess, cfg);
     let f_pre = @AstFoldFns {
-        fold_expr: |a,b,c| expand_expr(exts, cx, a, b, c, afp.fold_expr),
-        fold_mod: |a,b| expand_mod_items(exts, cx, a, b, afp.fold_mod),
-        fold_item: |a,b| expand_item(exts, cx, a, b, afp.fold_item),
-        fold_stmt: |a,b,c| expand_stmt(exts, cx, a, b, c, afp.fold_stmt),
+        fold_expr: |expr,span,recur|
+            expand_expr(extsbox, cx, expr, span, recur, afp.fold_expr),
+        fold_mod: |modd,recur|
+            expand_mod_items(extsbox, cx, modd, recur, afp.fold_mod),
+        fold_item: |item,recur|
+            expand_item(extsbox, cx, item, recur, afp.fold_item),
+        fold_stmt: |stmt,span,recur|
+            expand_stmt(extsbox, cx, stmt, span, recur, afp.fold_stmt),
+        fold_block: |blk,span,recur|
+            expand_block (extsbox, cx, blk, span, recur, afp.fold_block),
         new_span: |a| new_span(cx, a),
         .. *afp};
     let f = make_fold(f_pre);
-    let cm = parse_expr_from_source_str(~"<core-macros>",
-                                        @core_macros(),
-                                        cfg,
-                                        parse_sess);
+    // add a bunch of macros as though they were placed at the
+    // head of the program (ick).
+    let attrs = ~[spanned {span:codemap::dummy_sp(),
+                           node: attribute_
+                               {style:attr_outer,
+                                value:spanned
+                                    {node:meta_word(@~"macro_escape"),
+                                     span:codemap::dummy_sp()},
+                                is_sugared_doc:false}}];
+
+    let cm = match parse_item_from_source_str(~"<core-macros>",
+                                              @core_macros(),
+                                              cfg,attrs,
+                                              parse_sess) {
+        Some(item) => item,
+        None => cx.bug(~"expected core macros to parse correctly")
+    };
     // This is run for its side-effects on the expander env,
     // as it registers all the core macros as expanders.
-    f.fold_expr(cm);
+    f.fold_item(cm);
 
     let res = @f.fold_crate(*c);
     return res;
 }
 
+#[cfg(test)]
+mod test {
+    use super::*;
+    use util::testing::check_equal;
+
+    // make sure that fail! is present
+    #[test] fn fail_exists_test () {
+        let src = ~"fn main() { fail!(~\"something appropriately gloomy\");}";
+        let sess = parse::new_parse_sess(None);
+        let cfg = ~[];
+        let crate_ast = parse::parse_crate_from_source_str(
+            ~"<test>",
+            @src,
+            cfg,sess);
+        expand_crate(sess,cfg,crate_ast);
+    }
+
+    // these following tests are quite fragile, in that they don't test what
+    // *kind* of failure occurs.
+
+    // make sure that macros can leave scope
+    #[should_fail]
+    #[test] fn macros_cant_escape_fns_test () {
+        let src = ~"fn bogus() {macro_rules! z (() => (3+4))}\
+                    fn inty() -> int { z!() }";
+        let sess = parse::new_parse_sess(None);
+        let cfg = ~[];
+        let crate_ast = parse::parse_crate_from_source_str(
+            ~"<test>",
+            @src,
+            cfg,sess);
+        // should fail:
+        expand_crate(sess,cfg,crate_ast);
+    }
+
+    // make sure that macros can leave scope for modules
+    #[should_fail]
+    #[test] fn macros_cant_escape_mods_test () {
+        let src = ~"mod foo {macro_rules! z (() => (3+4))}\
+                    fn inty() -> int { z!() }";
+        let sess = parse::new_parse_sess(None);
+        let cfg = ~[];
+        let crate_ast = parse::parse_crate_from_source_str(
+            ~"<test>",
+            @src,
+            cfg,sess);
+        // should fail:
+        expand_crate(sess,cfg,crate_ast);
+    }
+
+    // macro_escape modules shouldn't cause macros to leave scope
+    #[test] fn macros_can_escape_flattened_mods_test () {
+        let src = ~"#[macro_escape] mod foo {macro_rules! z (() => (3+4))}\
+                    fn inty() -> int { z!() }";
+        let sess = parse::new_parse_sess(None);
+        let cfg = ~[];
+        let crate_ast = parse::parse_crate_from_source_str(
+            ~"<test>",
+            @src,
+            cfg,sess);
+        // should fail:
+        expand_crate(sess,cfg,crate_ast);
+    }
+
+    #[test] fn core_macros_must_parse () {
+        let src = ~"
+pub mod macros {
+    macro_rules! ignore (($($x:tt)*) => (()))
+
+    macro_rules! error ( ($( $arg:expr ),+) => (
+        log(::core::error, fmt!( $($arg),+ )) ))
+}";
+        let sess = parse::new_parse_sess(None);
+        let cfg = ~[];
+        let item_ast = parse::parse_item_from_source_str(
+            ~"<test>",
+            @src,
+            cfg,~[make_dummy_attr (@~"macro_escape")],sess);
+        match item_ast {
+            Some(_) => (), // success
+            None => fail!(~"expected this to parse")
+        }
+    }
+
+    #[test] fn test_contains_flatten (){
+        let attr1 = make_dummy_attr (@~"foo");
+        let attr2 = make_dummy_attr (@~"bar");
+        let escape_attr = make_dummy_attr (@~"macro_escape");
+        let attrs1 = ~[attr1, escape_attr, attr2];
+        check_equal (contains_macro_escape (attrs1),true);
+        let attrs2 = ~[attr1,attr2];
+        check_equal (contains_macro_escape (attrs2),false);
+    }
+
+    // make a "meta_word" outer attribute with the given name
+    fn make_dummy_attr(s: @~str) -> ast::attribute {
+        spanned {span:codemap::dummy_sp(),
+                 node: attribute_
+                     {style:attr_outer,
+                      value:spanned
+                          {node:meta_word(s),
+                           span:codemap::dummy_sp()},
+                      is_sugared_doc:false}}
+    }
+
+}
+
 // Local Variables:
 // mode: rust
 // fill-column: 78;
@@ -22,22 +22,9 @@ use core::result;
 use core::str;
 use core::vec;
 
-fn topmost_expn_info(expn_info: @codemap::ExpnInfo) -> @codemap::ExpnInfo {
-    let ExpandedFrom(CallInfo { call_site, _ }) = *expn_info;
-    match call_site.expn_info {
-        Some(next_expn_info) => {
-            let ExpandedFrom(CallInfo {
-                callee: NameAndSpan {name, _},
-                _
-            }) = *next_expn_info;
-            // Don't recurse into file using "include!"
-            if name == ~"include" { return expn_info; }
-
-            topmost_expn_info(next_expn_info)
-        },
-        None => expn_info
-    }
-}
+// These macros all relate to the file system; they either return
+// the column/row/filename of the expression, or they include
+// a given file into the current one.
 
 /* line!(): expands to the current line number */
 pub fn expand_line(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree])
@@ -87,6 +74,9 @@ pub fn expand_mod(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree])
                       |x| cx.str_of(*x)), ~"::")))
 }
 
+// include! : parse the given file as an expr
+// This is generally a bad idea because it's going to behave
+// unhygienically.
 pub fn expand_include(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree])
     -> base::MacResult {
     let file = get_single_str_from_tts(cx, sp, tts, "include!");
@@ -96,6 +86,7 @@ pub fn expand_include(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree])
     base::MRExpr(p.parse_expr())
 }
 
+// include_str! : read the given file, insert it as a literal string expr
pub fn expand_include_str(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree])
     -> base::MacResult {
     let file = get_single_str_from_tts(cx, sp, tts, "include_str!");
@@ -126,6 +117,26 @@ pub fn expand_include_bin(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree])
     }
 }
 
+// recur along an ExpnInfo chain to find the original expression
+fn topmost_expn_info(expn_info: @codemap::ExpnInfo) -> @codemap::ExpnInfo {
+    let ExpandedFrom(CallInfo { call_site, _ }) = *expn_info;
+    match call_site.expn_info {
+        Some(next_expn_info) => {
+            let ExpandedFrom(CallInfo {
+                callee: NameAndSpan {name, _},
+                _
+            }) = *next_expn_info;
+            // Don't recurse into file using "include!"
+            if name == ~"include" { return expn_info; }
+
+            topmost_expn_info(next_expn_info)
+        },
+        None => expn_info
+    }
+}
+
+// resolve a file-system path to an absolute file-system path (if it
+// isn't already)
 fn res_rel_file(cx: ext_ctxt, sp: codemap::span, arg: &Path) -> Path {
     // NB: relative paths are resolved relative to the compilation unit
     if !arg.is_absolute {
@@ -770,11 +770,13 @@ pub mod test {
     use diagnostic;
     use util::testing::{check_equal, check_equal_ptr};
 
+    // represents a testing reader (incl. both reader and interner)
     struct Env {
         interner: @token::ident_interner,
         string_reader: @mut StringReader
     }
 
+    // open a string reader for the given string
     fn setup(teststr: ~str) -> Env {
         let cm = CodeMap::new();
         let fm = cm.new_filemap(~"zebra.rs", @teststr);
@@ -809,6 +811,52 @@ pub mod test {
         check_equal (string_reader.last_pos,BytePos(29))
     }
 
+    // check that the given reader produces the desired stream
+    // of tokens (stop checking after exhausting the expected vec)
+    fn check_tokenization (env: Env, expected: ~[token::Token]) {
+        for expected.each |expected_tok| {
+            let TokenAndSpan {tok:actual_tok, sp: _} =
+                env.string_reader.next_token();
+            check_equal(&actual_tok,expected_tok);
+        }
+    }
+
+    // make the identifier by looking up the string in the interner
+    fn mk_ident (env: Env, id: ~str, is_mod_name: bool) -> token::Token {
+        token::IDENT (env.interner.intern(@id),is_mod_name)
+    }
+
+    #[test] fn doublecolonparsing () {
+        let env = setup (~"a b");
+        check_tokenization (env,
+                            ~[mk_ident (env,~"a",false),
+                              mk_ident (env,~"b",false)]);
+    }
+
+    #[test] fn dcparsing_2 () {
+        let env = setup (~"a::b");
+        check_tokenization (env,
+                            ~[mk_ident (env,~"a",true),
+                              token::MOD_SEP,
+                              mk_ident (env,~"b",false)]);
+    }
+
+    #[test] fn dcparsing_3 () {
+        let env = setup (~"a ::b");
+        check_tokenization (env,
+                            ~[mk_ident (env,~"a",false),
+                              token::MOD_SEP,
+                              mk_ident (env,~"b",false)]);
+    }
+
+    #[test] fn dcparsing_4 () {
+        let env = setup (~"a:: b");
+        check_tokenization (env,
+                            ~[mk_ident (env,~"a",true),
+                              token::MOD_SEP,
+                              mk_ident (env,~"b",false)]);
+    }
+
     #[test] fn character_a() {
         let env = setup(~"'a'");
         let TokenAndSpan {tok, sp: _} =
@@ -94,9 +94,7 @@ pub fn parse_crate_from_source_str(name: ~str,
                                    sess: @mut ParseSess) -> @ast::crate {
     let p = new_parser_from_source_str(sess, cfg, name,
                                        codemap::FssNone, source);
-    let r = p.parse_crate_mod(cfg);
-    p.abort_if_errors();
-    return r;
+    maybe_aborted(p.parse_crate_mod(cfg),p)
 }
 
 pub fn parse_expr_from_source_str(name: ~str,
@@ -105,9 +103,7 @@ pub fn parse_expr_from_source_str(name: ~str,
                                   sess: @mut ParseSess) -> @ast::expr {
     let p = new_parser_from_source_str(sess, cfg, name,
                                        codemap::FssNone, source);
-    let r = p.parse_expr();
-    p.abort_if_errors();
-    return r;
+    maybe_aborted(p.parse_expr(), p)
 }
 
 pub fn parse_item_from_source_str(name: ~str,
@@ -118,9 +114,7 @@ pub fn parse_item_from_source_str(name: ~str,
                                -> Option<@ast::item> {
     let p = new_parser_from_source_str(sess, cfg, name,
                                        codemap::FssNone, source);
-    let r = p.parse_item(attrs);
-    p.abort_if_errors();
-    return r;
+    maybe_aborted(p.parse_item(attrs),p)
 }
 
 pub fn parse_stmt_from_source_str(name: ~str,
@@ -130,9 +124,7 @@ pub fn parse_stmt_from_source_str(name: ~str,
                                   sess: @mut ParseSess) -> @ast::stmt {
     let p = new_parser_from_source_str(sess, cfg, name,
                                        codemap::FssNone, source);
-    let r = p.parse_stmt(attrs);
-    p.abort_if_errors();
-    return r;
+    maybe_aborted(p.parse_stmt(attrs),p)
 }
 
 pub fn parse_tts_from_source_str(name: ~str,
@@ -142,9 +134,7 @@ pub fn parse_tts_from_source_str(name: ~str,
     let p = new_parser_from_source_str(sess, cfg, name,
                                        codemap::FssNone, source);
     *p.quote_depth += 1u;
-    let r = p.parse_all_token_trees();
-    p.abort_if_errors();
-    return r;
+    maybe_aborted(p.parse_all_token_trees(),p)
 }
 
 pub fn parse_from_source_str<T>(f: fn (p: Parser) -> T,
@@ -159,8 +149,7 @@ pub fn parse_from_source_str<T>(f: fn (p: Parser) -> T,
     if !p.reader.is_eof() {
         p.reader.fatal(~"expected end-of-string");
     }
-    p.abort_if_errors();
-    r
+    maybe_aborted(r,p)
 }
 
 pub fn next_node_id(sess: @mut ParseSess) -> node_id {
@@ -181,8 +170,8 @@ pub fn new_parser_from_source_str(sess: @mut ParseSess, cfg: ast::crate_cfg,
     return Parser(sess, cfg, srdr as reader);
 }
 
-// Read the entire source file, return a parser
-// that draws from that string
+/// Read the entire source file, return a parser
+/// that draws from that string
 pub fn new_parser_result_from_file(sess: @mut ParseSess,
                                    cfg: ast::crate_cfg,
                                    path: &Path)
@@ -201,7 +190,7 @@ pub fn new_parser_result_from_file(sess: @mut ParseSess,
     }
 }
 
-/// Create a new parser for an entire crate, handling errors as appropriate
+/// Create a new parser, handling errors as appropriate
 /// if the file doesn't exist
 pub fn new_parser_from_file(sess: @mut ParseSess, cfg: ast::crate_cfg,
                             path: &Path) -> Parser {
@@ -232,6 +221,13 @@ pub fn new_parser_from_tts(sess: @mut ParseSess, cfg: ast::crate_cfg,
     return Parser(sess, cfg, trdr as reader)
 }
 
+// abort if necessary
+pub fn maybe_aborted<T>(+result : T, p: Parser) -> T {
+    p.abort_if_errors();
+    result
+}
+
 #[cfg(test)]
 mod test {
@@ -87,7 +87,9 @@ pub enum Token {
     LIT_STR(ast::ident),
 
     /* Name components */
-    // an identifier contains an "is_mod_name" boolean.
+    // an identifier contains an "is_mod_name" boolean,
+    // indicating whether :: follows this token with no
+    // whitespace in between.
     IDENT(ast::ident, bool),
     UNDERSCORE,
     LIFETIME(ast::ident),