refactoring mod.rs

John Clements 2013-04-23 10:57:41 -07:00
parent a2493ad048
commit c73a9c9cd0
5 changed files with 438 additions and 164 deletions

View file

@@ -149,7 +149,7 @@ pub fn parse_input(sess: Session, cfg: ast::crate_cfg, input: &input)
     -> @ast::crate {
     match *input {
       file_input(ref file) => {
-        parse::parse_crate_from_file_using_tts(&(*file), cfg, sess.parse_sess)
+        parse::parse_crate_from_file(&(*file), cfg, sess.parse_sess)
       }
       str_input(ref src) => {
         // FIXME (#2319): Don't really want to box the source string

View file

@@ -79,7 +79,7 @@ mod test {
         let parse_sess = syntax::parse::new_parse_sess(None);
         let parser = parse::new_parser_from_source_str(
-            parse_sess, ~[], ~"-", codemap::FssNone, @source);
+            parse_sess, ~[], ~"-", @source);
         parser.parse_outer_attributes()
     }

View file

@@ -13,7 +13,7 @@
 use ast::node_id;
 use ast;
-use codemap::{span, CodeMap};
+use codemap::{span, CodeMap, FileMap, FileSubstr};
 use codemap;
 use diagnostic::{span_handler, mk_span_handler, mk_handler, Emitter};
 use parse::attr::parser_attr;
@@ -22,7 +22,7 @@ use parse::parser::Parser;
 use parse::token::{ident_interner, mk_ident_interner};
 use core::io;
-use core::option::{None, Option};
+use core::option::{None, Option, Some};
 use core::path::Path;
 use core::result::{Err, Ok, Result};
@@ -82,31 +82,15 @@ pub fn new_parse_sess_special_handler(sh: @span_handler,
 // uses a HOF to parse anything, and <source> includes file and
 // source_str.
 
+// this appears to be the main entry point for rust parsing by
+// rustc and crate:
 pub fn parse_crate_from_file(
     input: &Path,
     cfg: ast::crate_cfg,
     sess: @mut ParseSess
 ) -> @ast::crate {
-    let p = new_parser_from_file(sess, /*bad*/ copy cfg, input);
-    p.parse_crate_mod(/*bad*/ copy cfg)
+    new_parser_from_file(sess, /*bad*/ copy cfg, input).parse_crate_mod()
     // why is there no p.abort_if_errors here?
 }
 
-pub fn parse_crate_from_file_using_tts(
-    input: &Path,
-    cfg: ast::crate_cfg,
-    sess: @mut ParseSess
-) -> @ast::crate {
-    let p = new_parser_from_file(sess, /*bad*/ copy cfg, input);
-    let tts = p.parse_all_token_trees();
-    new_parser_from_tts(sess,cfg,tts).parse_crate_mod(/*bad*/ copy cfg)
-    // why is there no p.abort_if_errors here?
-}
-
 pub fn parse_crate_from_source_str(
     name: ~str,
     source: @~str,
@@ -117,10 +101,9 @@ pub fn parse_crate_from_source_str(
         sess,
         /*bad*/ copy cfg,
         /*bad*/ copy name,
-        codemap::FssNone,
         source
     );
-    maybe_aborted(p.parse_crate_mod(/*bad*/ copy cfg),p)
+    maybe_aborted(p.parse_crate_mod(),p)
 }
 
 pub fn parse_expr_from_source_str(
@@ -133,7 +116,6 @@ pub fn parse_expr_from_source_str(
         sess,
         cfg,
         /*bad*/ copy name,
-        codemap::FssNone,
         source
     );
     maybe_aborted(p.parse_expr(), p)
@@ -150,7 +132,6 @@ pub fn parse_item_from_source_str(
         sess,
         cfg,
         /*bad*/ copy name,
-        codemap::FssNone,
         source
     );
     maybe_aborted(p.parse_item(attrs),p)
@@ -166,7 +147,6 @@ pub fn parse_meta_from_source_str(
         sess,
         cfg,
         /*bad*/ copy name,
-        codemap::FssNone,
         source
     );
     maybe_aborted(p.parse_meta_item(),p)
@@ -183,7 +163,6 @@ pub fn parse_stmt_from_source_str(
         sess,
         cfg,
         /*bad*/ copy name,
-        codemap::FssNone,
         source
     );
     maybe_aborted(p.parse_stmt(attrs),p)
@@ -199,13 +178,18 @@ pub fn parse_tts_from_source_str(
         sess,
         cfg,
         /*bad*/ copy name,
-        codemap::FssNone,
         source
     );
     *p.quote_depth += 1u;
+    // right now this is re-creating the token trees from ... token trees.
     maybe_aborted(p.parse_all_token_trees(),p)
 }
 
+// given a function and parsing information (source str,
+// filename, crate cfg, and sess), create a parser,
+// apply the function, and check that the parser
+// consumed all of the input before returning the function's
+// result.
 pub fn parse_from_source_str<T>(
     f: &fn(&Parser) -> T,
     name: ~str, ss: codemap::FileSubstr,
@@ -213,7 +197,7 @@ pub fn parse_from_source_str<T>(
     cfg: ast::crate_cfg,
     sess: @mut ParseSess
 ) -> T {
-    let p = new_parser_from_source_str(
+    let p = new_parser_from_source_substr(
         sess,
         cfg,
         name,
@@ -227,6 +211,7 @@ pub fn parse_from_source_str<T>(
     maybe_aborted(r,p)
 }
 
+// return the next unused node id.
 pub fn next_node_id(sess: @mut ParseSess) -> node_id {
     let rv = sess.next_id;
     sess.next_id += 1;
@@ -235,39 +220,24 @@ pub fn next_node_id(sess: @mut ParseSess) -> node_id {
     return rv;
 }
 
+// Create a new parser from a source string
 pub fn new_parser_from_source_str(sess: @mut ParseSess,
+                                  cfg: ast::crate_cfg,
+                                  name: ~str,
+                                  source: @~str)
+                               -> Parser {
+    filemap_to_parser(sess,string_to_filemap(sess,source,name),cfg)
+}
+
+// Create a new parser from a source string where the origin
+// is specified as a substring of another file.
+pub fn new_parser_from_source_substr(sess: @mut ParseSess,
                                   cfg: ast::crate_cfg,
                                   name: ~str,
                                   ss: codemap::FileSubstr,
                                   source: @~str)
                                -> Parser {
-    let filemap = sess.cm.new_filemap_w_substr(name, ss, source);
-    let srdr = lexer::new_string_reader(
-        copy sess.span_diagnostic,
-        filemap,
-        sess.interner
-    );
-    Parser(sess, cfg, srdr as @reader)
+    filemap_to_parser(sess,substring_to_filemap(sess,source,name,ss),cfg)
 }
 
-/// Read the entire source file, return a parser
-/// that draws from that string
-pub fn new_parser_result_from_file(
-    sess: @mut ParseSess,
-    cfg: ast::crate_cfg,
-    path: &Path
-) -> Result<Parser, ~str> {
-    match io::read_whole_file_str(path) {
-        Ok(src) => {
-            let filemap = sess.cm.new_filemap(path.to_str(), @src);
-            let srdr = lexer::new_string_reader(copy sess.span_diagnostic,
-                                                filemap,
-                                                sess.interner);
-            Ok(Parser(sess, cfg, srdr as @reader))
-        }
-        Err(e) => Err(e)
-    }
-}
 
 /// Create a new parser, handling errors as appropriate
@@ -277,35 +247,85 @@ pub fn new_parser_from_file(
     cfg: ast::crate_cfg,
     path: &Path
 ) -> Parser {
-    match new_parser_result_from_file(sess, cfg, path) {
-        Ok(parser) => parser,
-        Err(e) => {
-            sess.span_diagnostic.handler().fatal(e)
-        }
-    }
+    filemap_to_parser(sess,file_to_filemap(sess,path,None),cfg)
 }
 
-/// Create a new parser based on a span from an existing parser. Handles
-/// error messages correctly when the file does not exist.
+/// Given a session, a crate config, a path, and a span, add
+/// the file at the given path to the codemap, and return a parser.
+/// On an error, use the given span as the source of the problem.
 pub fn new_sub_parser_from_file(
     sess: @mut ParseSess,
     cfg: ast::crate_cfg,
     path: &Path,
     sp: span
 ) -> Parser {
-    match new_parser_result_from_file(sess, cfg, path) {
-        Ok(parser) => parser,
-        Err(e) => {
-            sess.span_diagnostic.span_fatal(sp, e)
-        }
-    }
+    filemap_to_parser(sess,file_to_filemap(sess,path,Some(sp)),cfg)
+}
+
+/// Given a filemap and config, return a parser
+pub fn filemap_to_parser(sess: @mut ParseSess,
+                         filemap: @FileMap,
+                         cfg: ast::crate_cfg) -> Parser {
+    tts_to_parser(sess,filemap_to_tts(sess,filemap),cfg)
+}
+
+// must preserve old name for now, because quote! from the *existing*
+// compiler expands into it
+pub fn new_parser_from_tts(sess: @mut ParseSess,
+                           cfg: ast::crate_cfg,
+                           tts: ~[ast::token_tree]) -> Parser {
+    tts_to_parser(sess,tts,cfg)
+}
+
+// base abstractions
+
+/// Given a session and a path and an optional span (for error reporting),
+/// add the path to the session's codemap and return the new filemap.
+pub fn file_to_filemap(sess: @mut ParseSess, path: &Path, spanopt: Option<span>)
+    -> @FileMap {
+    match io::read_whole_file_str(path) {
+        Ok(src) => string_to_filemap(sess, @src, path.to_str()),
+        Err(e) => {
+            match spanopt {
+                Some(span) => sess.span_diagnostic.span_fatal(span, e),
+                None => sess.span_diagnostic.handler().fatal(e)
+            }
+        }
+    }
+}
+
+// given a session and a string, add the string to
+// the session's codemap and return the new filemap
+pub fn string_to_filemap(sess: @mut ParseSess, source: @~str, path: ~str)
+    -> @FileMap {
+    sess.cm.new_filemap(path, source)
+}
+
+// given a session and a string and a path and a FileSubStr, add
+// the string to the CodeMap and return the new FileMap
+pub fn substring_to_filemap(sess: @mut ParseSess, source: @~str, path: ~str,
+                            filesubstr: FileSubstr) -> @FileMap {
+    sess.cm.new_filemap_w_substr(path,filesubstr,source)
+}
+
+// given a filemap, produce a sequence of token-trees
+pub fn filemap_to_tts(sess: @mut ParseSess, filemap: @FileMap)
+    -> ~[ast::token_tree] {
+    // it appears to me that the cfg doesn't matter here... indeed,
+    // parsing tt's probably shouldn't require a parser at all.
+    let cfg = ~[];
+    let srdr = lexer::new_string_reader(copy sess.span_diagnostic,
+                                        filemap,
+                                        sess.interner);
+    let p1 = Parser(sess, cfg, srdr as @reader);
+    p1.parse_all_token_trees()
+}
+
+// given tts and cfg, produce a parser
+pub fn tts_to_parser(sess: @mut ParseSess,
+                     tts: ~[ast::token_tree],
+                     cfg: ast::crate_cfg) -> Parser {
     let trdr = lexer::new_tt_reader(
         copy sess.span_diagnostic,
         sess.interner,
@@ -329,8 +349,76 @@ mod test {
     use std::serialize::Encodable;
     use std;
     use core::io;
+    use core::option::Option;
+    use core::option::Some;
     use core::option::None;
+    use core::int;
+    use core::num::NumCast;
+    use codemap::{dummy_sp, CodeMap, span, BytePos, spanned};
+    use opt_vec;
     use ast;
+    use abi;
+    use ast_util::mk_ident;
+    use parse::parser::Parser;
+    use parse::token::{ident_interner, mk_ident_interner, mk_fresh_ident_interner};
+    use diagnostic::{span_handler, mk_span_handler, mk_handler, Emitter};
+
+    // add known names to interner for testing
+    fn mk_testing_interner() -> @ident_interner {
+        let i = mk_fresh_ident_interner();
+        // baby hack; in order to put the identifiers
+        // 'a' and 'b' at known locations, we're going
+        // to fill up the interner to length 100. If
+        // the # of preloaded items on the interner
+        // ever gets larger than 100, we'll have to
+        // adjust this number (say, to 200) and
+        // change the numbers in the identifier
+        // test cases below.
+        assert!(i.len() < 100);
+        for int::range(0,100-((i.len()).to_int())) |_dc| {
+            i.gensym(@~"dontcare");
+        }
+        i.intern(@~"a");
+        i.intern(@~"b");
+        i.intern(@~"c");
+        i.intern(@~"d");
+        i.intern(@~"return");
+        assert!(i.get(ast::ident{repr:101,ctxt:0}) == @~"b");
+        i
+    }
+
+    // make a parse_sess that's closed over a
+    // testing interner (where a -> 100, b -> 101)
+    fn mk_testing_parse_sess() -> @mut ParseSess {
+        let interner = mk_testing_interner();
+        let cm = @CodeMap::new();
+        @mut ParseSess {
+            cm: cm,
+            next_id: 1,
+            span_diagnostic: mk_span_handler(mk_handler(None), cm),
+            interner: interner,
+        }
+    }
+
+    // map a string to tts, using a made-up filename: return both the token_trees
+    // and the ParseSess
+    fn string_to_tts_t (source_str : @~str) -> (~[ast::token_tree],@mut ParseSess) {
+        let ps = mk_testing_parse_sess();
+        (filemap_to_tts(ps,string_to_filemap(ps,source_str,~"bogofile")),ps)
+    }
+
+    // map a string to tts, return the tt without its parsesess
+    fn string_to_tts_only(source_str : @~str) -> ~[ast::token_tree] {
+        let (tts,ps) = string_to_tts_t(source_str);
+        tts
+    }
+
+    // map string to parser (via tts)
+    fn string_to_parser(source_str: @~str) -> Parser {
+        let ps = mk_testing_parse_sess();
+        new_parser_from_source_str(ps,~[],~"bogofile",source_str)
+    }
 
     #[test] fn to_json_str<E : Encodable<std::json::Encoder>>(val: @E) -> ~str {
         do io::with_str_writer |writer| {
@@ -339,49 +427,71 @@ mod test {
     }
 
     fn string_to_crate (source_str : @~str) -> @ast::crate {
-        parse_crate_from_source_str(
-            ~"bogofile",
-            source_str,
-            ~[],
-            new_parse_sess(None))
-    }
-
-    fn string_to_tt_to_crate (source_str : @~str) -> @ast::crate {
-        let tts = parse_tts_from_source_str(
-            ~"bogofile",
-            source_str,
-            ~[],
-            new_parse_sess(None));
-        new_parser_from_tts(new_parse_sess(None),~[],tts)
-            .parse_crate_mod(~[])
-    }
-
-    // make sure that parsing from TTs produces the same result
-    // as parsing from strings
-    #[test] fn tts_produce_the_same_result () {
-        let source_str = @~"fn foo (x : int) { x; }";
-        assert_eq!(string_to_tt_to_crate(source_str),
-                   string_to_crate(source_str));
-    }
-
-    // check the contents of the tt manually:
-    #[test] fn alltts () {
-        let source_str = @~"fn foo (x : int) { x; }";
-        let tts = parse_tts_from_source_str(
-            ~"bogofile",
-            source_str,
-            ~[],
-            new_parse_sess(None));
-        assert_eq!(
-            to_json_str(@tts),
-            ~"[\
+        string_to_parser(source_str).parse_crate_mod()
+    }
+
+    fn string_to_expr (source_str : @~str) -> @ast::expr {
+        string_to_parser(source_str).parse_expr()
+    }
+
+    fn string_to_item (source_str : @~str) -> Option<@ast::item> {
+        string_to_parser(source_str).parse_item(~[])
+    }
+
+    fn string_to_stmt (source_str : @~str) -> @ast::stmt {
+        string_to_parser(source_str).parse_stmt(~[])
+    }
+
+    // produce a codemap::span
+    fn sp (a: uint, b: uint) -> span {
+        span{lo:BytePos(a),hi:BytePos(b),expn_info:None}
+    }
+
+    // convert a vector of uints to a vector of ast::idents
+    fn ints_to_idents(ids: ~[uint]) -> ~[ast::ident] {
+        ids.map(|u| mk_ident(*u))
+    }
+
+    #[test] fn path_exprs_1 () {
+        assert_eq!(string_to_expr(@~"a"),
+                   @ast::expr{id:1,
+                              callee_id:2,
+                              node:ast::expr_path(@ast::Path {span:sp(0,1),
+                                                              global:false,
+                                                              idents:~[mk_ident(100)],
+                                                              rp:None,
+                                                              types:~[]}),
+                              span:sp(0,1)})
+    }
+
+    #[test] fn path_exprs_2 () {
+        assert_eq!(string_to_expr(@~"::a::b"),
+                   @ast::expr{id:1,
+                              callee_id:2,
+                              node:ast::expr_path(@ast::Path {span:sp(0,6),
+                                                              global:true,
+                                                              idents:ints_to_idents(~[100,101]),
+                                                              rp:None,
+                                                              types:~[]}),
+                              span:sp(0,6)})
+    }
+
+    #[should_fail]
+    #[test] fn bad_path_expr_1() {
+        string_to_expr(@~"::abc::def::return");
+    }
+
+    #[test] fn string_to_tts_1 () {
+        let (tts,ps) = string_to_tts_t(@~"fn a (b : int) { b; }");
+        assert_eq!(to_json_str(@tts),
+                   ~"[\
     [\"tt_tok\",null,[\"IDENT\",\"fn\",false]],\
-    [\"tt_tok\",null,[\"IDENT\",\"foo\",false]],\
+    [\"tt_tok\",null,[\"IDENT\",\"a\",false]],\
     [\
         \"tt_delim\",\
         [\
             [\"tt_tok\",null,\"LPAREN\"],\
-            [\"tt_tok\",null,[\"IDENT\",\"x\",false]],\
+            [\"tt_tok\",null,[\"IDENT\",\"b\",false]],\
             [\"tt_tok\",null,\"COLON\"],\
             [\"tt_tok\",null,[\"IDENT\",\"int\",false]],\
             [\"tt_tok\",null,\"RPAREN\"]\
@@ -391,21 +501,177 @@ mod test {
         \"tt_delim\",\
         [\
             [\"tt_tok\",null,\"LBRACE\"],\
-            [\"tt_tok\",null,[\"IDENT\",\"x\",false]],\
+            [\"tt_tok\",null,[\"IDENT\",\"b\",false]],\
             [\"tt_tok\",null,\"SEMI\"],\
             [\"tt_tok\",null,\"RBRACE\"]\
         ]\
     ]\
 ]"
         );
-        let ast1 = new_parser_from_tts(new_parse_sess(None),~[],tts)
-            .parse_item(~[]);
-        let ast2 = parse_item_from_source_str(
-            ~"bogofile",
-            @~"fn foo (x : int) { x; }",
-            ~[],~[],
-            new_parse_sess(None));
-        assert_eq!(ast1,ast2);
     }
+
+    #[test] fn ret_expr() {
+        assert_eq!(string_to_expr(@~"return d"),
+                   @ast::expr{id:3,
+                              callee_id:4,
+                              node:ast::expr_ret(
+                                  Some(@ast::expr{id:1,callee_id:2,
+                                                  node:ast::expr_path(
+                                                      @ast::Path{span:sp(7,8),
+                                                                 global:false,
+                                                                 idents:~[mk_ident(103)],
+                                                                 rp:None,
+                                                                 types:~[]
+                                                                }),
+                                                  span:sp(7,8)})),
+                              span:sp(0,8)})
+    }
+
+    #[test] fn parse_stmt_1 () {
+        assert_eq!(string_to_stmt(@~"b;"),
+                   @spanned{
+                       node: ast::stmt_expr(@ast::expr{
+                           id: 1,
+                           callee_id: 2,
+                           node: ast::expr_path(
+                               @ast::Path{
+                                   span:sp(0,1),
+                                   global:false,
+                                   idents:~[mk_ident(101)],
+                                   rp:None,
+                                   types: ~[]}),
+                           span: sp(0,1)},
+                           3), // fixme
+                       span: sp(0,1)})
+    }
+
+    #[test] fn parse_ident_pat () {
+        let parser = string_to_parser(@~"b");
+        assert_eq!(parser.parse_pat(false),
+                   @ast::pat{id:1, // fixme
+                             node: ast::pat_ident(ast::bind_by_copy,
+                                                  @ast::Path{
+                                                      span:sp(0,1),
+                                                      global:false,
+                                                      idents:~[mk_ident(101)],
+                                                      rp: None,
+                                                      types: ~[]},
+                                                  None // no idea
+                                                 ),
+                             span: sp(0,1)});
+        assert_eq!(*parser.token,token::EOF);
+    }
+
+    #[test] fn parse_arg () {
+        let parser = string_to_parser(@~"b : int");
+        assert_eq!(parser.parse_arg_general(true),
+                   ast::arg{
+                       mode: ast::infer(1),
+                       is_mutbl: false,
+                       ty: @ast::Ty{id:4, // fixme
+                                    node: ast::ty_path(@ast::Path{
+                                        span:sp(4,4), // this is bizarre...
+                                        // check this in the original parser?
+                                        global:false,
+                                        idents:~[mk_ident(105)],
+                                        rp: None,
+                                        types: ~[]},
+                                        3),
+                                    span:sp(4,7)},
+                       pat: @ast::pat{id:2,
+                                      node: ast::pat_ident(ast::bind_by_copy,
+                                                           @ast::Path{
+                                                               span:sp(0,1),
+                                                               global:false,
+                                                               idents:~[mk_ident(101)],
+                                                               rp: None,
+                                                               types: ~[]},
+                                                           None // no idea
+                                                          ),
+                                      span: sp(0,3)}, // really?
+                       id: 5 // fixme
+                   })
+    }
+
+    // check the contents of the tt manually:
+    #[test] fn parse_fundecl () {
+        // this test depends on the intern order of "fn" and "int", and on the
+        // assignment order of the node_ids.
+        assert_eq!(string_to_item(@~"fn a (b : int) { b; }"),
+                   Some(
+                       @ast::item{ident:mk_ident(100),
+                                  attrs:~[],
+                                  id: 11, // fixme
+                                  node: ast::item_fn(ast::fn_decl{
+                                      inputs: ~[ast::arg{
+                                          mode: ast::infer(1),
+                                          is_mutbl: false,
+                                          ty: @ast::Ty{id:4, // fixme
+                                                       node: ast::ty_path(@ast::Path{
+                                                           span:sp(10,13),
+                                                           global:false,
+                                                           idents:~[mk_ident(106)],
+                                                           rp: None,
+                                                           types: ~[]},
+                                                           3),
+                                                       span:sp(10,13)},
+                                          pat: @ast::pat{id:2, // fixme
+                                                         node: ast::pat_ident(
+                                                             ast::bind_by_copy,
+                                                             @ast::Path{
+                                                                 span:sp(6,7),
+                                                                 global:false,
+                                                                 idents:~[mk_ident(101)],
+                                                                 rp: None,
+                                                                 types: ~[]},
+                                                             None // no idea
+                                                         ),
+                                                         span: sp(6,9)}, // bleah.
+                                          id: 5 // fixme
+                                      }],
+                                      output: @ast::Ty{id:6, // fixme
+                                                       node: ast::ty_nil,
+                                                       span:sp(15,15)}, // not sure
+                                      cf: ast::return_val
+                                  },
+                                  ast::impure_fn,
+                                  abi::AbiSet::Rust(),
+                                  ast::Generics{ // no idea on either of these:
+                                      lifetimes: opt_vec::Empty,
+                                      ty_params: opt_vec::Empty,
+                                  },
+                                  spanned{
+                                      span: sp(15,21),
+                                      node: ast::blk_{
+                                          view_items: ~[],
+                                          stmts: ~[@spanned{
+                                              node: ast::stmt_semi(@ast::expr{
+                                                  id: 7,
+                                                  callee_id: 8,
+                                                  node: ast::expr_path(
+                                                      @ast::Path{
+                                                          span:sp(17,18),
+                                                          global:false,
+                                                          idents:~[mk_ident(101)],
+                                                          rp:None,
+                                                          types: ~[]}),
+                                                  span: sp(17,18)},
+                                                  9), // fixme
+                                              span: sp(17,18)}],
+                                          expr: None,
+                                          id: 10, // fixme
+                                          rules: ast::default_blk // no idea
+                                      }}),
+                                  vis: ast::inherited,
+                                  span: sp(0,21)}));
+    }
+
+    #[test] fn parse_exprs () {
+        // just make sure that they parse....
+        string_to_expr(@~"3 + 4");
+        string_to_expr(@~"a::z.froob(b,@(987+3))");
+    }
 }
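
A note on the mod.rs refactoring above: every parser is now built through the same chain of "base abstractions". A source (file or string) is added to the codemap as a FileMap, the FileMap is lexed into token trees, and the token trees feed a Parser. Below is a minimal sketch of that composition in present-day Rust, for illustration only; Session, FileMap, TokenTree, and Parser are stand-in types here, not the libsyntax ones.

// Sketch only: modern-Rust stand-ins for the 2013 pipeline above.
#![allow(dead_code)]
use std::io;
use std::path::Path;

struct Session;                              // plays the role of @mut ParseSess
struct FileMap { name: String, src: String } // codemap entry
struct TokenTree(String);                    // stand-in token tree
struct Parser { tts: Vec<TokenTree> }

// base abstractions, mirroring string_to_filemap / file_to_filemap /
// filemap_to_tts / tts_to_parser in the diff:
fn string_to_filemap(_sess: &Session, src: String, name: String) -> FileMap {
    FileMap { name, src }
}

fn file_to_filemap(sess: &Session, path: &Path) -> io::Result<FileMap> {
    let src = std::fs::read_to_string(path)?;
    Ok(string_to_filemap(sess, src, path.display().to_string()))
}

fn filemap_to_tts(_sess: &Session, fm: &FileMap) -> Vec<TokenTree> {
    // stand-in "lexer": one token per whitespace-separated word
    fm.src.split_whitespace().map(|w| TokenTree(w.to_string())).collect()
}

fn tts_to_parser(_sess: &Session, tts: Vec<TokenTree>) -> Parser {
    Parser { tts }
}

// the public entry points become one-line compositions:
fn new_parser_from_source_str(sess: &Session, name: String, src: String) -> Parser {
    let fm = string_to_filemap(sess, src, name);
    tts_to_parser(sess, filemap_to_tts(sess, &fm))
}

fn new_parser_from_file(sess: &Session, path: &Path) -> io::Result<Parser> {
    let fm = file_to_filemap(sess, path)?;
    Ok(tts_to_parser(sess, filemap_to_tts(sess, &fm)))
}

fn main() {
    let sess = Session;
    let p = new_parser_from_source_str(&sess, "bogofile".into(),
                                       "fn a (b : int) { b; }".into());
    println!("{} token trees", p.tts.len());
}

The payoff of the layering is visible in the diff: new_parser_from_file, new_parser_from_source_str, and friends shrink to one-line compositions, and anything that can produce a FileMap or a list of token trees gets a parser for free.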

View file

@@ -4463,7 +4463,7 @@ pub impl Parser {
     }
 
     // Parses a source module as a crate
-    fn parse_crate_mod(&self, _cfg: crate_cfg) -> @crate {
+    fn parse_crate_mod(&self) -> @crate {
         let lo = self.span.lo;
         // parse the crate's inner attrs, maybe (oops) one
         // of the attrs of an item:

View file

@@ -390,60 +390,68 @@ pub impl ident_interner {
     }
 }
 
+// return a fresh interner, preloaded with special identifiers.
+// EFFECT: stores this interner in TLS
+pub fn mk_fresh_ident_interner() -> @ident_interner {
+    // the indices here must correspond to the numbers in
+    // special_idents.
+    let init_vec = ~[
+        @~"_",                  // 0
+        @~"anon",               // 1
+        @~"drop",               // 2
+        @~"",                   // 3
+        @~"unary",              // 4
+        @~"!",                  // 5
+        @~"[]",                 // 6
+        @~"unary-",             // 7
+        @~"__extensions__",     // 8
+        @~"self",               // 9
+        @~"item",               // 10
+        @~"block",              // 11
+        @~"stmt",               // 12
+        @~"pat",                // 13
+        @~"expr",               // 14
+        @~"ty",                 // 15
+        @~"ident",              // 16
+        @~"path",               // 17
+        @~"tt",                 // 18
+        @~"matchers",           // 19
+        @~"str",                // 20
+        @~"TyVisitor",          // 21
+        @~"arg",                // 22
+        @~"descrim",            // 23
+        @~"__rust_abi",         // 24
+        @~"__rust_stack_shim",  // 25
+        @~"TyDesc",             // 26
+        @~"dtor",               // 27
+        @~"main",               // 28
+        @~"<opaque>",           // 29
+        @~"blk",                // 30
+        @~"static",             // 31
+        @~"intrinsic",          // 32
+        @~"__foreign_mod__",    // 33
+        @~"__field__",          // 34
+        @~"C",                  // 35
+        @~"Self",               // 36
+    ];
+    let rv = @ident_interner {
+        interner: interner::Interner::prefill(init_vec)
+    };
+    unsafe {
+        task::local_data::local_data_set(interner_key!(), @rv);
+    }
+    rv
+}
+
+// if an interner exists in TLS, return it. Otherwise, prepare a
+// fresh one.
 pub fn mk_ident_interner() -> @ident_interner {
     unsafe {
         match task::local_data::local_data_get(interner_key!()) {
             Some(interner) => *interner,
             None => {
-                // the indices here must correspond to the numbers in
-                // special_idents.
-                let init_vec = ~[
-                    @~"_",                  // 0
-                    @~"anon",               // 1
-                    @~"drop",               // 2
-                    @~"",                   // 3
-                    @~"unary",              // 4
-                    @~"!",                  // 5
-                    @~"[]",                 // 6
-                    @~"unary-",             // 7
-                    @~"__extensions__",     // 8
-                    @~"self",               // 9
-                    @~"item",               // 10
-                    @~"block",              // 11
-                    @~"stmt",               // 12
-                    @~"pat",                // 13
-                    @~"expr",               // 14
-                    @~"ty",                 // 15
-                    @~"ident",              // 16
-                    @~"path",               // 17
-                    @~"tt",                 // 18
-                    @~"matchers",           // 19
-                    @~"str",                // 20
-                    @~"TyVisitor",          // 21
-                    @~"arg",                // 22
-                    @~"descrim",            // 23
-                    @~"__rust_abi",         // 24
-                    @~"__rust_stack_shim",  // 25
-                    @~"TyDesc",             // 26
-                    @~"dtor",               // 27
-                    @~"main",               // 28
-                    @~"<opaque>",           // 29
-                    @~"blk",                // 30
-                    @~"static",             // 31
-                    @~"intrinsic",          // 32
-                    @~"__foreign_mod__",    // 33
-                    @~"__field__",          // 34
-                    @~"C",                  // 35
-                    @~"Self",               // 36
-                ];
-                let rv = @ident_interner {
-                    interner: interner::Interner::prefill(init_vec)
-                };
-                task::local_data::local_data_set(interner_key!(), @rv);
-                rv
+                mk_fresh_ident_interner()
             }
         }
     }
 }
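
The token.rs change above is an extract-function refactor: building the prefilled interner moves into mk_fresh_ident_interner (which the new tests use to get an interner with predictable indices), while mk_ident_interner keeps only the check-task-local-storage-else-build-and-cache step. A minimal sketch of that memoization pattern in present-day Rust follows, for illustration; the Interner type and function names are stand-ins, not the libsyntax API.

use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;

// Illustrative interner, not the libsyntax type.
struct Interner {
    map: RefCell<HashMap<String, u32>>,
    names: RefCell<Vec<String>>,
}

impl Interner {
    // mirrors Interner::prefill: the indices of `init` must line up
    // with a fixed table of special identifiers
    fn prefill(init: &[&str]) -> Interner {
        let it = Interner {
            map: RefCell::new(HashMap::new()),
            names: RefCell::new(Vec::new()),
        };
        for s in init {
            it.intern(s);
        }
        it
    }

    fn intern(&self, s: &str) -> u32 {
        if let Some(&idx) = self.map.borrow().get(s) {
            return idx;
        }
        let idx = self.names.borrow().len() as u32;
        self.map.borrow_mut().insert(s.to_string(), idx);
        self.names.borrow_mut().push(s.to_string());
        idx
    }
}

// plays the role of mk_fresh_ident_interner: always builds a new table
fn mk_fresh_interner() -> Rc<Interner> {
    Rc::new(Interner::prefill(&["_", "anon", "drop" /* ... */]))
}

thread_local! {
    // plays the role of the interner_key! TLS slot
    static INTERNER: RefCell<Option<Rc<Interner>>> = RefCell::new(None);
}

// plays the role of mk_ident_interner: return the cached instance,
// creating and storing it on first use
fn mk_interner() -> Rc<Interner> {
    INTERNER.with(|slot| {
        slot.borrow_mut()
            .get_or_insert_with(mk_fresh_interner)
            .clone()
    })
}

fn main() {
    let a = mk_interner();
    let b = mk_interner();
    // both calls observe the same cached table
    assert_eq!(a.intern("x"), b.intern("x"));
    println!("ok: {}", a.intern("x"));
}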