rust/src/libsyntax/ext/tt/macro_rules.rs

// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::prelude::*;
use ast::{ident, matcher_, matcher, match_tok, match_nonterminal, match_seq};
use ast::{tt_delim};
use ast;
use codemap::{span, spanned, dummy_sp};
use ext::base::{ext_ctxt, MacResult, MRAny, MRDef, MacroDef, NormalTT};
use ext::base;
use ext::tt::macro_parser::{error};
use ext::tt::macro_parser::{named_match, matched_seq, matched_nonterminal};
use ext::tt::macro_parser::{parse, parse_or_else, success, failure};
use parse::lexer::{new_tt_reader, reader};
use parse::parser::Parser;
use parse::token::special_idents;
use parse::token::{FAT_ARROW, SEMI, LBRACE, RBRACE, nt_matchers, nt_tt};
use print;
use core::io;
use std::oldmap::HashMap;

pub fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
                         arg: ~[ast::token_tree]) -> base::MacResult {
    // these spans won't matter, anyways
    fn ms(m: matcher_) -> matcher {
        spanned { node: m, span: dummy_sp() }
    }

    let lhs_nm = cx.parse_sess().interner.gensym(@~"lhs");
    let rhs_nm = cx.parse_sess().interner.gensym(@~"rhs");
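    // (`gensym` makes fresh, unreferenceable names here, so the `lhs`/`rhs`
    // bindings used by the grammar below cannot collide with anything the
    // macro author wrote.)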

    // The grammar for macro_rules! is:
    // $( $lhs:mtcs => $rhs:tt );+
    // ...quasiquoting this would be nice.
    let argument_gram = ~[
        ms(match_seq(~[
            ms(match_nonterminal(lhs_nm, special_idents::matchers, 0u)),
            ms(match_tok(FAT_ARROW)),
            ms(match_nonterminal(rhs_nm, special_idents::tt, 1u)),
        ], Some(SEMI), false, 0u, 2u)),
        // to phase into semicolon-termination instead of
        // semicolon-separation
        ms(match_seq(~[ms(match_tok(SEMI))], None, true, 2u, 2u))];
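    // For example (illustrative only), a definition such as
    //     macro_rules! four { () => { 2 + 2 } }
    // hands this function the token trees `() => { 2 + 2 }` as `arg`;
    // matching them against `argument_gram` binds `lhs` to the matcher `()`
    // and `rhs` to the token tree `{ 2 + 2 }`.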

    // Parse the macro_rules! invocation (`none` is for no interpolations):
    let arg_reader = new_tt_reader(cx.parse_sess().span_diagnostic,
                                   cx.parse_sess().interner, None, arg);
    let argument_map = parse_or_else(cx.parse_sess(), cx.cfg(),
                                     arg_reader as reader, argument_gram);
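    // (`parse_or_else`, unlike the plain `parse` used per-arm below, turns
    // any failure into a fatal diagnostic: a malformed macro_rules! body is
    // not something we can recover from.)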

    // Extract the arguments:
    let lhses: ~[@named_match] = match argument_map.get(&lhs_nm) {
        @matched_seq(s, _) => s,
        _ => cx.span_bug(sp, ~"wrong-structured lhs")
    };
    let rhses: ~[@named_match] = match argument_map.get(&rhs_nm) {
        @matched_seq(s, _) => s,
        _ => cx.span_bug(sp, ~"wrong-structured rhs")
    };
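    // The two sequences are parallel: `lhses[i]` is the matcher for the i-th
    // arm and `rhses[i]` is the transcriber that arm expands into.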

    // Given `lhses` and `rhses`, this is the new macro we create
    fn generic_extension(cx: ext_ctxt, sp: span, name: ident,
                         arg: ~[ast::token_tree],
                         lhses: ~[@named_match], rhses: ~[@named_match])
                      -> MacResult {

        if cx.trace_macros() {
            io::println(fmt!("%s! { %s }",
                             cx.str_of(name),
                             print::pprust::tt_to_str(
                                 ast::tt_delim(arg),
                                 cx.parse_sess().interner)));
        }
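        // (This is the output behind the `trace_macros!` debugging feature:
        // when enabled, each invocation of this macro is echoed before it is
        // matched.)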

        // Which arm's failure should we report? (the one furthest along)
        let mut best_fail_spot = dummy_sp();
        let mut best_fail_msg = ~"internal error: ran no matchers";
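        // ("Furthest along" is a heuristic: if one arm fails on the first
        // token while another fails three tokens in, the latter's message is
        // almost certainly the more useful diagnostic.)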

        let s_d = cx.parse_sess().span_diagnostic;
        let itr = cx.parse_sess().interner;

        for lhses.eachi() |i, lhs| { // try each arm's matchers
            match *lhs {
              @matched_nonterminal(nt_matchers(ref mtcs)) => {
                // `none` is because we're not interpolating
                let arg_rdr = new_tt_reader(s_d, itr, None, arg) as reader;
                match parse(cx.parse_sess(), cx.cfg(), arg_rdr, (*mtcs)) {
                  success(named_matches) => {
                    let rhs = match rhses[i] {
                        // okay, what's your transcriber?
                        @matched_nonterminal(nt_tt(@ref tt)) => {
                            match (*tt) {
                                // cut off delimiters; don't parse 'em
                                tt_delim(ref tts) => {
                                    (*tts).slice(1u, (*tts).len() - 1u)
                                }
                                _ => cx.span_fatal(
                                    sp, ~"macro rhs must be delimited")
                            }
                        },
                        _ => cx.span_bug(sp, ~"bad thing in rhs")
                    };
                    // rhs has holes (`$id` and `$(...)`) that need filling
                    let trncbr = new_tt_reader(s_d, itr, Some(named_matches),
                                               rhs);
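                    // For example (illustrative only), if this arm was
                    // `($e:expr) => { $e + 1 }`, then `named_matches` maps
                    // `e` to the expression the caller supplied, and the
                    // transcribing reader splices that expression in
                    // wherever `$e` occurs in `rhs`.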
                    let p = @Parser(cx.parse_sess(), cx.cfg(),
                                    trncbr as reader);
                    // Let the context choose how to interpret the result.
                    // Weird, but useful for X-macros.
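                    // (The three thunks below are lazy: only the one the
                    // expansion context actually invokes gets run, so the
                    // same definition can expand as an expression, an item,
                    // or a statement depending on where it is used.)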
                    return MRAny(|| p.parse_expr(),
                                 || p.parse_item(~[/* no attrs */]),
                                 || p.parse_stmt(~[/* no attrs */]));
                  }
                  failure(sp, ref msg) => if sp.lo >= best_fail_spot.lo {
                    best_fail_spot = sp;
                    best_fail_msg = (*msg);
                  },
                  error(sp, ref msg) => cx.span_fatal(sp, (*msg))
                }
              }
              _ => cx.bug(~"non-matcher found in parsed lhses")
            }
        }
        cx.span_fatal(best_fail_spot, best_fail_msg);
    }

    let exp: @fn(ext_ctxt, span, ~[ast::token_tree]) -> MacResult =
        |cx, sp, arg| generic_extension(cx, sp, name, arg, lhses, rhses);

    return MRDef(MacroDef{
        name: *cx.parse_sess().interner.get(name),
        ext: NormalTT(base::SyntaxExpanderTT{expander: exp, span: Some(sp)})
    });
}