
libsyntax: Fix tests.
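
Most of these test fixes follow one mechanical pattern: the libsyntax test helpers stop taking garbage-collected @str strings and take owned ~str instead (or, where only an attribute name is needed, a &str that is interned), so every @"..." literal in the tests becomes ~"...". The one behavioral change is in the test harness generator, which now keeps every item attribute except "main" (note the added "!"). A minimal before/after sketch of the string change, in the pre-1.0 dialect these files use (@str and ~str no longer exist in later Rust):

    // Before: test fixtures were built from managed (@) string literals.
    let fm = cm.new_filemap(@"blork.rs", @"first line.\nsecond line");
    fn make_dummy_attr(s: @str) -> ast::Attribute { /* ... */ }

    // After: owned ~str literals; attribute names arrive as &str and are
    // interned (token::intern_and_get_ident) before being stored in the AST.
    let fm = cm.new_filemap(~"blork.rs", ~"first line.\nsecond line");
    fn make_dummy_attr(s: &str) -> ast::Attribute { /* ... */ }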

Patrick Walton 2014-01-30 18:46:19 -08:00 committed by Huon Wilson
parent 52eeed2f73
commit b890237e79
7 changed files with 52 additions and 50 deletions

View file

@@ -134,7 +134,7 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> {
 if !cx.sess.building_library.get() {
 @ast::Item {
 attrs: item.attrs.iter().filter_map(|attr| {
-if attr.name().equiv(&("main")) {
+if !attr.name().equiv(&("main")) {
 Some(*attr)
 } else {
 None

View file

@@ -468,7 +468,7 @@ mod test {
 #[test]
 fn t1 () {
 let cm = CodeMap::new();
-let fm = cm.new_filemap(@"blork.rs",@"first line.\nsecond line");
+let fm = cm.new_filemap(~"blork.rs",~"first line.\nsecond line");
 fm.next_line(BytePos(0));
 assert_eq!(&fm.get_line(0),&~"first line.");
 // TESTING BROKEN BEHAVIOR:
@@ -480,7 +480,7 @@ mod test {
 #[should_fail]
 fn t2 () {
 let cm = CodeMap::new();
-let fm = cm.new_filemap(@"blork.rs",@"first line.\nsecond line");
+let fm = cm.new_filemap(~"blork.rs",~"first line.\nsecond line");
 // TESTING *REALLY* BROKEN BEHAVIOR:
 fm.next_line(BytePos(0));
 fm.next_line(BytePos(10));

View file

@@ -1021,11 +1021,11 @@ mod test {
 // make sure that macros can leave scope
 #[should_fail]
 #[test] fn macros_cant_escape_fns_test () {
-let src = @"fn bogus() {macro_rules! z (() => (3+4))}\
+let src = ~"fn bogus() {macro_rules! z (() => (3+4))}\
 fn inty() -> int { z!() }";
 let sess = parse::new_parse_sess(None);
 let crate_ast = parse::parse_crate_from_source_str(
-@"<test>",
+~"<test>",
 src,
 ~[],sess);
 // should fail:
@@ -1036,11 +1036,11 @@ mod test {
 // make sure that macros can leave scope for modules
 #[should_fail]
 #[test] fn macros_cant_escape_mods_test () {
-let src = @"mod foo {macro_rules! z (() => (3+4))}\
+let src = ~"mod foo {macro_rules! z (() => (3+4))}\
 fn inty() -> int { z!() }";
 let sess = parse::new_parse_sess(None);
 let crate_ast = parse::parse_crate_from_source_str(
-@"<test>",
+~"<test>",
 src,
 ~[],sess);
 // should fail:
@@ -1050,11 +1050,11 @@ mod test {
 // macro_escape modules shouldn't cause macros to leave scope
 #[test] fn macros_can_escape_flattened_mods_test () {
-let src = @"#[macro_escape] mod foo {macro_rules! z (() => (3+4))}\
+let src = ~"#[macro_escape] mod foo {macro_rules! z (() => (3+4))}\
 fn inty() -> int { z!() }";
 let sess = parse::new_parse_sess(None);
 let crate_ast = parse::parse_crate_from_source_str(
-@"<test>",
+~"<test>",
 src,
 ~[], sess);
 // should fail:
@@ -1063,9 +1063,9 @@ mod test {
 }
 #[test] fn test_contains_flatten (){
-let attr1 = make_dummy_attr (@"foo");
-let attr2 = make_dummy_attr (@"bar");
-let escape_attr = make_dummy_attr (@"macro_escape");
+let attr1 = make_dummy_attr ("foo");
+let attr2 = make_dummy_attr ("bar");
+let escape_attr = make_dummy_attr ("macro_escape");
 let attrs1 = ~[attr1, escape_attr, attr2];
 assert_eq!(contains_macro_escape (attrs1),true);
 let attrs2 = ~[attr1,attr2];
@@ -1073,13 +1073,13 @@ mod test {
 }
 // make a MetaWord outer attribute with the given name
-fn make_dummy_attr(s: @str) -> ast::Attribute {
+fn make_dummy_attr(s: &str) -> ast::Attribute {
 Spanned {
 span:codemap::DUMMY_SP,
 node: Attribute_ {
 style: AttrOuter,
 value: @Spanned {
-node: MetaWord(s),
+node: MetaWord(token::intern_and_get_ident(s)),
 span: codemap::DUMMY_SP,
 },
 is_sugared_doc: false,
@@ -1089,7 +1089,7 @@ mod test {
 #[test]
 fn renaming () {
-let item_ast = string_to_crate(@"fn f() -> int { a }");
+let item_ast = string_to_crate(~"fn f() -> int { a }");
 let a_name = intern("a");
 let a2_name = gensym("a2");
 let mut renamer = new_rename_folder(ast::Ident{name:a_name,ctxt:EMPTY_CTXT},
@@ -1128,7 +1128,7 @@ mod test {
 // pprust::print_crate_(&mut s, crate);
 //}
-fn expand_crate_str(crate_str: @str) -> ast::Crate {
+fn expand_crate_str(crate_str: ~str) -> ast::Crate {
 let (crate_ast,ps) = string_to_crate_and_sess(crate_str);
 // the cfg argument actually does matter, here...
 let mut loader = ErrLoader;
@@ -1146,7 +1146,7 @@ mod test {
 //}
 #[test] fn macro_tokens_should_match(){
-expand_crate_str(@"macro_rules! m((a)=>(13)) fn main(){m!(a);}");
+expand_crate_str(~"macro_rules! m((a)=>(13)) fn main(){m!(a);}");
 }
 // renaming tests expand a crate and then check that the bindings match
@@ -1222,7 +1222,7 @@ mod test {
 let (teststr, bound_connections, bound_ident_check) = match *t {
 (ref str,ref conns, bic) => (str.to_managed(), conns.clone(), bic)
 };
-let cr = expand_crate_str(teststr.to_managed());
+let cr = expand_crate_str(teststr.to_owned());
 // find the bindings:
 let mut name_finder = new_name_finder(~[]);
 visit::walk_crate(&mut name_finder,&cr,());
@@ -1285,7 +1285,7 @@ mod test {
 }
 #[test] fn fmt_in_macro_used_inside_module_macro() {
-let crate_str = @"macro_rules! fmt_wrap(($b:expr)=>($b.to_str()))
+let crate_str = ~"macro_rules! fmt_wrap(($b:expr)=>($b.to_str()))
 macro_rules! foo_module (() => (mod generated { fn a() { let xx = 147; fmt_wrap!(xx);}}))
 foo_module!()
 ";
@@ -1335,7 +1335,7 @@ foo_module!()
 #[test]
 fn pat_idents(){
-let pat = string_to_pat(@"(a,Foo{x:c @ (b,9),y:Bar(4,d)})");
+let pat = string_to_pat(~"(a,Foo{x:c @ (b,9),y:Bar(4,d)})");
 let mut pat_idents = new_name_finder(~[]);
 pat_idents.visit_pat(pat, ());
 assert_eq!(pat_idents.ident_accumulator,

View file

@@ -899,7 +899,8 @@ mod test {
 // make sure idents get transformed everywhere
 #[test] fn ident_transformation () {
 let mut zz_fold = ToZzIdentFolder;
-let ast = string_to_crate(@"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}");
+let ast = string_to_crate(
+~"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}");
 assert_pred!(matches_codepattern,
 "matches_codepattern",
 pprust::to_str(&mut zz_fold.fold_crate(ast),fake_print_crate,
@@ -910,8 +911,9 @@ mod test {
 // even inside macro defs....
 #[test] fn ident_transformation_in_defs () {
 let mut zz_fold = ToZzIdentFolder;
-let ast = string_to_crate(@"macro_rules! a {(b $c:expr $(d $e:token)f+
-=> (g $(d $d $e)+))} ");
+let ast = string_to_crate(
+~"macro_rules! a {(b $c:expr $(d $e:token)f+ => \
+(g $(d $d $e)+))} ");
 assert_pred!(matches_codepattern,
 "matches_codepattern",
 pprust::to_str(&mut zz_fold.fold_crate(ast),fake_print_crate,

View file

@@ -972,9 +972,9 @@ mod test {
 }
 // open a string reader for the given string
-fn setup(teststr: @str) -> Env {
+fn setup(teststr: ~str) -> Env {
 let cm = CodeMap::new();
-let fm = cm.new_filemap(@"zebra.rs", teststr);
+let fm = cm.new_filemap(~"zebra.rs", teststr);
 let span_handler =
 diagnostic::mk_span_handler(diagnostic::mk_handler(None),@cm);
 Env {
@@ -984,7 +984,7 @@ mod test {
 #[test] fn t1 () {
 let Env {string_reader} =
-setup(@"/* my source file */ \
+setup(~"/* my source file */ \
 fn main() { println!(\"zebra\"); }\n");
 let id = str_to_ident("fn");
 let tok1 = string_reader.next_token();
@@ -1020,14 +1020,14 @@ mod test {
 }
 #[test] fn doublecolonparsing () {
-let env = setup (@"a b");
+let env = setup (~"a b");
 check_tokenization (env,
 ~[mk_ident("a",false),
 mk_ident("b",false)]);
 }
 #[test] fn dcparsing_2 () {
-let env = setup (@"a::b");
+let env = setup (~"a::b");
 check_tokenization (env,
 ~[mk_ident("a",true),
 token::MOD_SEP,
@@ -1035,7 +1035,7 @@ mod test {
 }
 #[test] fn dcparsing_3 () {
-let env = setup (@"a ::b");
+let env = setup (~"a ::b");
 check_tokenization (env,
 ~[mk_ident("a",false),
 token::MOD_SEP,
@@ -1043,7 +1043,7 @@ mod test {
 }
 #[test] fn dcparsing_4 () {
-let env = setup (@"a:: b");
+let env = setup (~"a:: b");
 check_tokenization (env,
 ~[mk_ident("a",true),
 token::MOD_SEP,
@@ -1051,28 +1051,28 @@ mod test {
 }
 #[test] fn character_a() {
-let env = setup(@"'a'");
+let env = setup(~"'a'");
 let TokenAndSpan {tok, sp: _} =
 env.string_reader.next_token();
 assert_eq!(tok,token::LIT_CHAR('a' as u32));
 }
 #[test] fn character_space() {
-let env = setup(@"' '");
+let env = setup(~"' '");
 let TokenAndSpan {tok, sp: _} =
 env.string_reader.next_token();
 assert_eq!(tok, token::LIT_CHAR(' ' as u32));
 }
 #[test] fn character_escaped() {
-let env = setup(@"'\\n'");
+let env = setup(~"'\\n'");
 let TokenAndSpan {tok, sp: _} =
 env.string_reader.next_token();
 assert_eq!(tok, token::LIT_CHAR('\n' as u32));
 }
 #[test] fn lifetime_name() {
-let env = setup(@"'abc");
+let env = setup(~"'abc");
 let TokenAndSpan {tok, sp: _} =
 env.string_reader.next_token();
 let id = token::str_to_ident("abc");
@@ -1080,7 +1080,7 @@ mod test {
 }
 #[test] fn raw_string() {
-let env = setup(@"r###\"\"#a\\b\x00c\"\"###");
+let env = setup(~"r###\"\"#a\\b\x00c\"\"###");
 let TokenAndSpan {tok, sp: _} =
 env.string_reader.next_token();
 let id = token::str_to_ident("\"#a\\b\x00c\"");
@@ -1094,7 +1094,7 @@ mod test {
 }
 #[test] fn nested_block_comments() {
-let env = setup(@"/* /* */ */'a'");
+let env = setup(~"/* /* */ */'a'");
 let TokenAndSpan {tok, sp: _} =
 env.string_reader.next_token();
 assert_eq!(tok,token::LIT_CHAR('a' as u32));

View file

@@ -314,7 +314,7 @@ mod test {
 }
 #[test] fn path_exprs_1() {
-assert_eq!(string_to_expr(@"a"),
+assert_eq!(string_to_expr(~"a"),
 @ast::Expr{
 id: ast::DUMMY_NODE_ID,
 node: ast::ExprPath(ast::Path {
@@ -333,7 +333,7 @@ mod test {
 }
 #[test] fn path_exprs_2 () {
-assert_eq!(string_to_expr(@"::a::b"),
+assert_eq!(string_to_expr(~"::a::b"),
 @ast::Expr {
 id: ast::DUMMY_NODE_ID,
 node: ast::ExprPath(ast::Path {
@@ -358,12 +358,12 @@ mod test {
 #[should_fail]
 #[test] fn bad_path_expr_1() {
-string_to_expr(@"::abc::def::return");
+string_to_expr(~"::abc::def::return");
 }
 // check the token-tree-ization of macros
 #[test] fn string_to_tts_macro () {
-let tts = string_to_tts(@"macro_rules! zip (($a)=>($a))");
+let tts = string_to_tts(~"macro_rules! zip (($a)=>($a))");
 match tts {
 [ast::TTTok(_,_),
 ast::TTTok(_,token::NOT),
@@ -407,7 +407,7 @@ mod test {
 }
 #[test] fn string_to_tts_1 () {
-let tts = string_to_tts(@"fn a (b : int) { b; }");
+let tts = string_to_tts(~"fn a (b : int) { b; }");
 assert_eq!(to_json_str(&tts),
 ~"[\
 {\
@@ -536,7 +536,7 @@ mod test {
 }
 #[test] fn ret_expr() {
-assert_eq!(string_to_expr(@"return d"),
+assert_eq!(string_to_expr(~"return d"),
 @ast::Expr{
 id: ast::DUMMY_NODE_ID,
 node:ast::ExprRet(Some(@ast::Expr{
@@ -559,7 +559,7 @@ mod test {
 }
 #[test] fn parse_stmt_1 () {
-assert_eq!(string_to_stmt(@"b;"),
+assert_eq!(string_to_stmt(~"b;"),
 @Spanned{
 node: ast::StmtExpr(@ast::Expr {
 id: ast::DUMMY_NODE_ID,
@@ -585,7 +585,7 @@ mod test {
 }
 #[test] fn parse_ident_pat () {
-let mut parser = string_to_parser(@"b");
+let mut parser = string_to_parser(~"b");
 assert_eq!(parser.parse_pat(),
 @ast::Pat{id: ast::DUMMY_NODE_ID,
 node: ast::PatIdent(
@@ -609,7 +609,7 @@ mod test {
 // check the contents of the tt manually:
 #[test] fn parse_fundecl () {
 // this test depends on the intern order of "fn" and "int"
-assert_eq!(string_to_item(@"fn a (b : int) { b; }"),
+assert_eq!(string_to_item(~"fn a (b : int) { b; }"),
 Some(
 @ast::Item{ident:str_to_ident("a"),
 attrs:~[],
@@ -701,12 +701,12 @@ mod test {
 #[test] fn parse_exprs () {
 // just make sure that they parse....
-string_to_expr(@"3 + 4");
-string_to_expr(@"a::z.froob(b,@(987+3))");
+string_to_expr(~"3 + 4");
+string_to_expr(~"a::z.froob(b,@(987+3))");
 }
 #[test] fn attrs_fix_bug () {
-string_to_item(@"pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
+string_to_item(~"pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
 -> Result<@Writer, ~str> {
 #[cfg(windows)]
 fn wb() -> c_int {

View file

@@ -19,7 +19,7 @@ use parse::token;
 // and the ParseSess
 pub fn string_to_tts_and_sess (source_str : ~str) -> (~[ast::TokenTree], @ParseSess) {
 let ps = new_parse_sess(None);
-(filemap_to_tts(ps,string_to_filemap(ps,source_str,@"bogofile")),ps)
+(filemap_to_tts(ps,string_to_filemap(ps,source_str,~"bogofile")),ps)
 }
 // map a string to tts, using a made-up filename:
@@ -30,7 +30,7 @@ pub fn string_to_tts(source_str : ~str) -> ~[ast::TokenTree] {
 pub fn string_to_parser_and_sess(source_str: ~str) -> (Parser,@ParseSess) {
 let ps = new_parse_sess(None);
-(new_parser_from_source_str(ps,~[],@"bogofile",source_str),ps)
+(new_parser_from_source_str(ps,~[],~"bogofile",source_str),ps)
 }
 // map string to parser (via tts)