Support prefix notation for vstore strings. Closes #2906.
parent e4de160222
commit 985b52be6d
46 changed files with 252 additions and 237 deletions
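In brief, the hunks below rewrite boxed string literals from the bare `@"..."` form to the explicitly vstore-annotated `@"..."/~` form, and extend the parser so that the `&[...]`, `@[...]` and `~[...]` evec sugar also accepts string literals, not just vector expressions. A minimal before/after illustration, taken directly from the hunks below (this is pre-1.0 Rust syntax and will not compile with a modern compiler):

    local_data_set(my_key, @"parent data");      // old spelling (removed lines)
    local_data_set(my_key, @"parent data"/~);    // new spelling (added lines)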
@@ -283,7 +283,7 @@ fn load_crate(filename: str) -> option<crate> {
 ast::view_item_use(ident, metas, id) {
 let name_items = attr::find_meta_items_by_name(metas, "name");
 let m = if name_items.is_empty() {
-metas + ~[attr::mk_name_value_item_str(@"name", *ident)]
+metas + ~[attr::mk_name_value_item_str(@"name"/~, *ident)]
 } else {
 metas
 };
@@ -45,7 +45,7 @@ fn common_exprs() -> ~[ast::expr] {
 dse(ast::expr_again),
 dse(ast::expr_fail(option::none)),
 dse(ast::expr_fail(option::some(
-@dse(ast::expr_lit(@dsl(ast::lit_str(@"boo"))))))),
+@dse(ast::expr_lit(@dsl(ast::lit_str(@"boo"/~))))))),
 dse(ast::expr_ret(option::none)),
 dse(ast::expr_lit(@dsl(ast::lit_nil))),
 dse(ast::expr_lit(@dsl(ast::lit_bool(false)))),
@@ -1301,10 +1301,10 @@ fn test_unkillable_nested() {
 #[test]
 fn test_tls_multitask() unsafe {
 fn my_key(+_x: @str/~) { }
-local_data_set(my_key, @"parent data");
+local_data_set(my_key, @"parent data"/~);
 do task::spawn {
 assert local_data_get(my_key) == none; // TLS shouldn't carry over.
-local_data_set(my_key, @"child data");
+local_data_set(my_key, @"child data"/~);
 assert *(local_data_get(my_key).get()) == "child data";
 // should be cleaned up for us
 }
@@ -1317,15 +1317,15 @@ fn test_tls_multitask() unsafe {
 #[test]
 fn test_tls_overwrite() unsafe {
 fn my_key(+_x: @str/~) { }
-local_data_set(my_key, @"first data");
+local_data_set(my_key, @"first data"/~);
-local_data_set(my_key, @"next data"); // Shouldn't leak.
+local_data_set(my_key, @"next data"/~); // Shouldn't leak.
 assert *(local_data_get(my_key).get()) == "next data";
 }

 #[test]
 fn test_tls_pop() unsafe {
 fn my_key(+_x: @str/~) { }
-local_data_set(my_key, @"weasel");
+local_data_set(my_key, @"weasel"/~);
 assert *(local_data_pop(my_key).get()) == "weasel";
 // Pop must remove the data from the map.
 assert local_data_pop(my_key) == none;
@@ -1337,12 +1337,12 @@ fn test_tls_modify() unsafe {
 local_data_modify(my_key, |data| {
 alt data {
 some(@val) { fail "unwelcome value: " + val }
-none { some(@"first data") }
+none { some(@"first data"/~) }
 }
 });
 local_data_modify(my_key, |data| {
 alt data {
-some(@"first data") { some(@"next data") }
+some(@"first data"/~) { some(@"next data"/~) }
 some(@val) { fail "wrong value: " + val }
 none { fail "missing value" }
 }
@@ -1359,7 +1359,7 @@ fn test_tls_crust_automorestack_memorial_bug() unsafe {
 // for logging, think vsnprintf) would run on a stack smaller than 1 MB.
 fn my_key(+_x: @str/~) { }
 do task::spawn {
-unsafe { local_data_set(my_key, @"hax"); }
+unsafe { local_data_set(my_key, @"hax"/~); }
 }
 }

@@ -1369,7 +1369,7 @@ fn test_tls_multiple_types() unsafe {
 fn box_key(+_x: @@()) { }
 fn int_key(+_x: @int) { }
 do task::spawn {
-local_data_set(str_key, @"string data");
+local_data_set(str_key, @"string data"/~);
 local_data_set(box_key, @@());
 local_data_set(int_key, @42);
 }
@@ -1381,7 +1381,7 @@ fn test_tls_overwrite_multiple_types() unsafe {
 fn box_key(+_x: @@()) { }
 fn int_key(+_x: @int) { }
 do task::spawn {
-local_data_set(str_key, @"string data");
+local_data_set(str_key, @"string data"/~);
 local_data_set(int_key, @42);
 // This could cause a segfault if overwriting-destruction is done with
 // the crazy polymorphic transmute rather than the provided finaliser.
@@ -1396,10 +1396,10 @@ fn test_tls_cleanup_on_failure() unsafe {
 fn str_key(+_x: @str/~) { }
 fn box_key(+_x: @@()) { }
 fn int_key(+_x: @int) { }
-local_data_set(str_key, @"parent data");
+local_data_set(str_key, @"parent data"/~);
 local_data_set(box_key, @@());
 do task::spawn { // spawn_linked
-local_data_set(str_key, @"string data");
+local_data_set(str_key, @"string data"/~);
 local_data_set(box_key, @@());
 local_data_set(int_key, @42);
 fail;
@@ -58,7 +58,7 @@ mod tests {
 #[test]
 fn test_bump_box_refcount() {
 unsafe {
-let box = @"box box box"; // refcount 1
+let box = @"box box box"/~; // refcount 1
 bump_box_refcount(box); // refcount 2
 let ptr: *int = transmute(box); // refcount 2
 let _box1: @str/~ = reinterpret_cast(ptr);
@@ -659,8 +659,8 @@ mod tests {

 #[test]
 fn test_write_str() {
-assert to_str(string(@"")) == "\"\"";
+assert to_str(string(@""/~)) == "\"\""/~;
-assert to_str(string(@"foo")) == "\"foo\"";
+assert to_str(string(@"foo"/~)) == "\"foo\""/~;
 }

 #[test]
@@ -676,7 +676,7 @@ mod tests {
 assert to_str(list(@~[
 boolean(false),
 null,
-list(@~[string(@"foo\nbar"), num(3.5f)])
+list(@~[string(@"foo\nbar"/~), num(3.5f)])
 ])) == "[false, null, [\"foo\\nbar\", 3.5]]";
 }

@@ -687,8 +687,8 @@ mod tests {
 assert to_str(mk_dict(~[
 ("a", boolean(true)),
 ("b", list(@~[
-mk_dict(~[("c", string(@"\x0c\r"))]),
+mk_dict(~[("c", string(@"\x0c\r"/~))]),
-mk_dict(~[("d", string(@""))])
+mk_dict(~[("d", string(@""/~))])
 ]))
 ])) ==
 "{ " +
@@ -703,35 +703,35 @@ mod tests {
 #[test]
 fn test_trailing_characters() {
 assert from_str("nulla") ==
-err({line: 1u, col: 5u, msg: @"trailing characters"});
+err({line: 1u, col: 5u, msg: @"trailing characters"/~});
 assert from_str("truea") ==
-err({line: 1u, col: 5u, msg: @"trailing characters"});
+err({line: 1u, col: 5u, msg: @"trailing characters"/~});
 assert from_str("falsea") ==
-err({line: 1u, col: 6u, msg: @"trailing characters"});
+err({line: 1u, col: 6u, msg: @"trailing characters"/~});
 assert from_str("1a") ==
-err({line: 1u, col: 2u, msg: @"trailing characters"});
+err({line: 1u, col: 2u, msg: @"trailing characters"/~});
 assert from_str("[]a") ==
-err({line: 1u, col: 3u, msg: @"trailing characters"});
+err({line: 1u, col: 3u, msg: @"trailing characters"/~});
 assert from_str("{}a") ==
-err({line: 1u, col: 3u, msg: @"trailing characters"});
+err({line: 1u, col: 3u, msg: @"trailing characters"/~});
 }

 #[test]
 fn test_read_identifiers() {
 assert from_str("n") ==
-err({line: 1u, col: 2u, msg: @"invalid syntax"});
+err({line: 1u, col: 2u, msg: @"invalid syntax"/~});
 assert from_str("nul") ==
-err({line: 1u, col: 4u, msg: @"invalid syntax"});
+err({line: 1u, col: 4u, msg: @"invalid syntax"/~});

 assert from_str("t") ==
-err({line: 1u, col: 2u, msg: @"invalid syntax"});
+err({line: 1u, col: 2u, msg: @"invalid syntax"/~});
 assert from_str("truz") ==
-err({line: 1u, col: 4u, msg: @"invalid syntax"});
+err({line: 1u, col: 4u, msg: @"invalid syntax"/~});

 assert from_str("f") ==
-err({line: 1u, col: 2u, msg: @"invalid syntax"});
+err({line: 1u, col: 2u, msg: @"invalid syntax"/~});
 assert from_str("faz") ==
-err({line: 1u, col: 3u, msg: @"invalid syntax"});
+err({line: 1u, col: 3u, msg: @"invalid syntax"/~});

 assert from_str("null") == ok(null);
 assert from_str("true") == ok(boolean(true));
@@ -744,20 +744,20 @@ mod tests {
 #[test]
 fn test_read_num() {
 assert from_str("+") ==
-err({line: 1u, col: 1u, msg: @"invalid syntax"});
+err({line: 1u, col: 1u, msg: @"invalid syntax"/~});
 assert from_str(".") ==
-err({line: 1u, col: 1u, msg: @"invalid syntax"});
+err({line: 1u, col: 1u, msg: @"invalid syntax"/~});

 assert from_str("-") ==
-err({line: 1u, col: 2u, msg: @"invalid number"});
+err({line: 1u, col: 2u, msg: @"invalid number"/~});
 assert from_str("00") ==
-err({line: 1u, col: 2u, msg: @"invalid number"});
+err({line: 1u, col: 2u, msg: @"invalid number"/~});
 assert from_str("1.") ==
-err({line: 1u, col: 3u, msg: @"invalid number"});
+err({line: 1u, col: 3u, msg: @"invalid number"/~});
 assert from_str("1e") ==
-err({line: 1u, col: 3u, msg: @"invalid number"});
+err({line: 1u, col: 3u, msg: @"invalid number"/~});
 assert from_str("1e+") ==
-err({line: 1u, col: 4u, msg: @"invalid number"});
+err({line: 1u, col: 4u, msg: @"invalid number"/~});

 assert from_str("3") == ok(num(3f));
 assert from_str("3.1") == ok(num(3.1f));
@@ -772,32 +772,32 @@ mod tests {
 #[test]
 fn test_read_str() {
 assert from_str("\"") ==
-err({line: 1u, col: 2u, msg: @"EOF while parsing string"});
+err({line: 1u, col: 2u, msg: @"EOF while parsing string"/~});
 assert from_str("\"lol") ==
-err({line: 1u, col: 5u, msg: @"EOF while parsing string"});
+err({line: 1u, col: 5u, msg: @"EOF while parsing string"/~});

-assert from_str("\"\"") == ok(string(@""));
+assert from_str("\"\"") == ok(string(@""/~));
-assert from_str("\"foo\"") == ok(string(@"foo"));
+assert from_str("\"foo\"") == ok(string(@"foo"/~));
-assert from_str("\"\\\"\"") == ok(string(@"\""));
+assert from_str("\"\\\"\"") == ok(string(@"\""/~));
-assert from_str("\"\\b\"") == ok(string(@"\x08"));
+assert from_str("\"\\b\"") == ok(string(@"\x08"/~));
-assert from_str("\"\\n\"") == ok(string(@"\n"));
+assert from_str("\"\\n\"") == ok(string(@"\n"/~));
-assert from_str("\"\\r\"") == ok(string(@"\r"));
+assert from_str("\"\\r\"") == ok(string(@"\r"/~));
-assert from_str("\"\\t\"") == ok(string(@"\t"));
+assert from_str("\"\\t\"") == ok(string(@"\t"/~));
-assert from_str(" \"foo\" ") == ok(string(@"foo"));
+assert from_str(" \"foo\" ") == ok(string(@"foo"/~));
 }

 #[test]
 fn test_read_list() {
 assert from_str("[") ==
-err({line: 1u, col: 2u, msg: @"EOF while parsing value"});
+err({line: 1u, col: 2u, msg: @"EOF while parsing value"/~});
 assert from_str("[1") ==
-err({line: 1u, col: 3u, msg: @"EOF while parsing list"});
+err({line: 1u, col: 3u, msg: @"EOF while parsing list"/~});
 assert from_str("[1,") ==
-err({line: 1u, col: 4u, msg: @"EOF while parsing value"});
+err({line: 1u, col: 4u, msg: @"EOF while parsing value"/~});
 assert from_str("[1,]") ==
-err({line: 1u, col: 4u, msg: @"invalid syntax"});
+err({line: 1u, col: 4u, msg: @"invalid syntax"/~});
 assert from_str("[6 7]") ==
-err({line: 1u, col: 4u, msg: @"expected `,` or `]`"});
+err({line: 1u, col: 4u, msg: @"expected `,` or `]`"/~});

 assert from_str("[]") == ok(list(@~[]));
 assert from_str("[ ]") == ok(list(@~[]));
@@ -813,28 +813,28 @@ mod tests {
 #[test]
 fn test_read_dict() {
 assert from_str("{") ==
-err({line: 1u, col: 2u, msg: @"EOF while parsing object"});
+err({line: 1u, col: 2u, msg: @"EOF while parsing object"/~});
 assert from_str("{ ") ==
-err({line: 1u, col: 3u, msg: @"EOF while parsing object"});
+err({line: 1u, col: 3u, msg: @"EOF while parsing object"/~});
 assert from_str("{1") ==
-err({line: 1u, col: 2u, msg: @"key must be a string"});
+err({line: 1u, col: 2u, msg: @"key must be a string"/~});
 assert from_str("{ \"a\"") ==
-err({line: 1u, col: 6u, msg: @"EOF while parsing object"});
+err({line: 1u, col: 6u, msg: @"EOF while parsing object"/~});
 assert from_str("{\"a\"") ==
-err({line: 1u, col: 5u, msg: @"EOF while parsing object"});
+err({line: 1u, col: 5u, msg: @"EOF while parsing object"/~});
 assert from_str("{\"a\" ") ==
-err({line: 1u, col: 6u, msg: @"EOF while parsing object"});
+err({line: 1u, col: 6u, msg: @"EOF while parsing object"/~});

 assert from_str("{\"a\" 1") ==
-err({line: 1u, col: 6u, msg: @"expected `:`"});
+err({line: 1u, col: 6u, msg: @"expected `:`"/~});
 assert from_str("{\"a\":") ==
-err({line: 1u, col: 6u, msg: @"EOF while parsing value"});
+err({line: 1u, col: 6u, msg: @"EOF while parsing value"/~});
 assert from_str("{\"a\":1") ==
-err({line: 1u, col: 7u, msg: @"EOF while parsing object"});
+err({line: 1u, col: 7u, msg: @"EOF while parsing object"/~});
 assert from_str("{\"a\":1 1") ==
-err({line: 1u, col: 8u, msg: @"expected `,` or `}`"});
+err({line: 1u, col: 8u, msg: @"expected `,` or `}`"/~});
 assert from_str("{\"a\":1,") ==
-err({line: 1u, col: 8u, msg: @"EOF while parsing object"});
+err({line: 1u, col: 8u, msg: @"EOF while parsing object"/~});

 assert eq(result::get(from_str("{}")), mk_dict(~[]));
 assert eq(result::get(from_str("{\"a\": 3}")),
@@ -866,7 +866,7 @@ mod tests {
 ("a", num(1.0f)),
 ("b", list(@~[
 boolean(true),
-string(@"foo\nbar"),
+string(@"foo\nbar"/~),
 mk_dict(~[
 ("c", mk_dict(~[("d", null)]))
 ])
@@ -877,6 +877,6 @@ mod tests {
 #[test]
 fn test_multiline_errors() {
 assert from_str("{\n \"foo\":\n \"bar\"") ==
-err({line: 3u, col: 8u, msg: @"EOF while parsing object"});
+err({line: 3u, col: 8u, msg: @"EOF while parsing object"/~});
 }
 }
@@ -1270,7 +1270,7 @@ mod tests {

 #[test]
 fn of_string1() {
-let sample = @"0123456789ABCDE";
+let sample = @"0123456789ABCDE"/~;
 let r = of_str(sample);

 assert char_len(r) == str::char_len(*sample);
@@ -1330,7 +1330,7 @@ mod tests {

 #[test]
 fn bal1() {
-let init = @ "1234567890";
+let init = @"1234567890"/~;
 let buf = @mut * init;
 let mut i = 0;
 while i < 8 { *buf = *buf + *buf; i+=1;}
@@ -1352,7 +1352,7 @@ mod tests {
 #[ignore]
 fn char_at1() {
 //Generate a large rope
-let mut r = of_str(@ "123456789");
+let mut r = of_str(@"123456789"/~);
 for uint::range(0u, 10u) |_i| {
 r = append_rope(r, r);
 }
@@ -1384,7 +1384,7 @@ mod tests {
 #[test]
 fn concat1() {
 //Generate a reasonable rope
-let chunk = of_str(@ "123456789");
+let chunk = of_str(@"123456789"/~);
 let mut r = empty();
 for uint::range(0u, 10u) |_i| {
 r = append_rope(r, chunk);
@@ -381,7 +381,7 @@ fn dtor_dec() -> fn_decl {
 let nil_t = @{id: 0, node: ty_nil, span: dummy_sp()};
 // dtor has one argument, of type ()
 {inputs: ~[{mode: ast::expl(ast::by_ref),
-ty: nil_t, ident: @"_", id: 0}],
+ty: nil_t, ident: @"_"/~, id: 0}],
 output: nil_t, purity: impure_fn, cf: return_val, constraints: ~[]}
 }
@@ -77,7 +77,7 @@ fn mk_sugared_doc_attr(text: str, lo: uint, hi: uint) -> ast::attribute {
 let lit = spanned(lo, hi, ast::lit_str(@text));
 let attr = {
 style: doc_comment_style(text),
-value: spanned(lo, hi, ast::meta_name_value(@"doc", lit)),
+value: spanned(lo, hi, ast::meta_name_value(@"doc"/~, lit)),
 is_sugared_doc: true
 };
 ret spanned(lo, hi, attr);
@@ -97,7 +97,7 @@ fn attr_metas(attrs: ~[ast::attribute]) -> ~[@ast::meta_item] {
 fn desugar_doc_attr(attr: ast::attribute) -> ast::attribute {
 if attr.node.is_sugared_doc {
 let comment = get_meta_item_value_str(@attr.node.value).get();
-let meta = mk_name_value_item_str(@"doc",
+let meta = mk_name_value_item_str(@"doc"/~,
 strip_doc_comment_decoration(*comment));
 ret mk_attr(meta);
 } else {
@@ -345,13 +345,13 @@ fn foreign_abi(attrs: ~[ast::attribute]) -> either<str, ast::foreign_abi> {
 option::none {
 either::right(ast::foreign_abi_cdecl)
 }
-option::some(@"rust-intrinsic") {
+option::some(@"rust-intrinsic"/~) {
 either::right(ast::foreign_abi_rust_intrinsic)
 }
-option::some(@"cdecl") {
+option::some(@"cdecl"/~) {
 either::right(ast::foreign_abi_cdecl)
 }
-option::some(@"stdcall") {
+option::some(@"stdcall"/~) {
 either::right(ast::foreign_abi_stdcall)
 }
 option::some(t) {
@@ -371,8 +371,8 @@ fn find_inline_attr(attrs: ~[ast::attribute]) -> inline_attr {
 // FIXME (#2809)---validate the usage of #[inline] and #[inline(always)]
 do vec::foldl(ia_none, attrs) |ia,attr| {
 alt attr.node.value.node {
-ast::meta_word(@"inline") { ia_hint }
+ast::meta_word(@"inline"/~) { ia_hint }
-ast::meta_list(@"inline", items) {
+ast::meta_list(@"inline"/~, items) {
 if !vec::is_empty(find_meta_items_by_name(items, "always")) {
 ia_always
 } else {
@@ -250,7 +250,7 @@ fn highlight_lines(cm: codemap::codemap, sp: span,

 fn print_macro_backtrace(cm: codemap::codemap, sp: span) {
 do option::iter (sp.expn_info) |ei| {
-let ss = option::map_default(ei.callie.span, @"",
+let ss = option::map_default(ei.callie.span, @""/~,
 |span| @codemap::span_to_str(span, cm));
 print_diagnostic(*ss, note,
 #fmt("in expansion of #%s", ei.callie.name));
@@ -92,7 +92,7 @@ fn expand(cx: ext_ctxt,
 _mitem: ast::meta_item,
 in_items: ~[@ast::item]) -> ~[@ast::item] {
 fn not_auto_serialize(a: ast::attribute) -> bool {
-attr::get_attr_name(a) != @"auto_serialize"
+attr::get_attr_name(a) != @"auto_serialize"/~
 }

 fn filter_attrs(item: @ast::item) -> @ast::item {
@@ -154,7 +154,7 @@ impl helpers for ext_ctxt {
 let args = do vec::map(input_tys) |ty| {
 {mode: ast::expl(ast::by_ref),
 ty: ty,
-ident: @"",
+ident: @""/~,
 id: self.next_id()}
 };

@@ -350,7 +350,7 @@ fn is_vec_or_str(ty: @ast::ty) -> bool {
 // This may be wrong if the user has shadowed (!) str
 ast::ty_path(@{span: _, global: _, idents: ids,
 rp: none, types: _}, _)
-if ids == ~[@"str"] { true }
+if ids == ~[@"str"/~] { true }
 _ { false }
 }
 }
@@ -529,12 +529,12 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident,

 let ser_inputs: ~[ast::arg] =
 vec::append(~[{mode: ast::expl(ast::by_ref),
-ty: cx.ty_path(span, ~[@"__S"], ~[]),
+ty: cx.ty_path(span, ~[@"__S"/~], ~[]),
-ident: @"__s",
+ident: @"__s"/~,
 id: cx.next_id()},
 {mode: ast::expl(ast::by_ref),
 ty: v_ty,
-ident: @"__v",
+ident: @"__v"/~,
 id: cx.next_id()}],
 tp_inputs);

@@ -552,12 +552,12 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident,

 let ser_bnds = @~[
 ast::bound_trait(cx.ty_path(span,
-~[@"std", @"serialization",
+~[@"std"/~, @"serialization"/~,
-@"serializer"],
+@"serializer"/~],
 ~[]))];

 let ser_tps: ~[ast::ty_param] =
-vec::append(~[{ident: @"__S",
+vec::append(~[{ident: @"__S"/~,
 id: cx.next_id(),
 bounds: ser_bnds}],
 vec::map(tps, |tp| cx.clone_ty_param(tp)));
@@ -749,8 +749,8 @@ fn mk_deser_fn(cx: ext_ctxt, span: span,

 let deser_inputs: ~[ast::arg] =
 vec::append(~[{mode: ast::expl(ast::by_ref),
-ty: cx.ty_path(span, ~[@"__D"], ~[]),
+ty: cx.ty_path(span, ~[@"__D"/~], ~[]),
-ident: @"__d",
+ident: @"__d"/~,
 id: cx.next_id()}],
 tp_inputs);

@@ -768,11 +768,11 @@ fn mk_deser_fn(cx: ext_ctxt, span: span,
 let deser_bnds = @~[
 ast::bound_trait(cx.ty_path(
 span,
-~[@"std", @"serialization", @"deserializer"],
+~[@"std"/~, @"serialization"/~, @"deserializer"/~],
 ~[]))];

 let deser_tps: ~[ast::ty_param] =
-vec::append(~[{ident: @"__D",
+vec::append(~[{ident: @"__D"/~,
 id: cx.next_id(),
 bounds: deser_bnds}],
 vec::map(tps, |tp| {
@@ -38,7 +38,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
 pieces: ~[piece], args: ~[@ast::expr])
 -> @ast::expr {
 fn make_path_vec(_cx: ext_ctxt, ident: ast::ident) -> ~[ast::ident] {
-ret ~[@"extfmt", @"rt", ident];
+ret ~[@"extfmt"/~, @"rt"/~, ident];
 }
 fn make_rt_path_expr(cx: ext_ctxt, sp: span,
 ident: ast::ident) -> @ast::expr {
@@ -50,7 +50,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,

 fn make_rt_conv_expr(cx: ext_ctxt, sp: span, cnv: conv) -> @ast::expr {
 fn make_flags(cx: ext_ctxt, sp: span, flags: ~[flag]) -> @ast::expr {
-let mut tmp_expr = make_rt_path_expr(cx, sp, @"flag_none");
+let mut tmp_expr = make_rt_path_expr(cx, sp, @"flag_none"/~);
 for flags.each |f| {
 let fstr = alt f {
 flag_left_justify { "flag_left_justify" }
@@ -67,11 +67,11 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
 fn make_count(cx: ext_ctxt, sp: span, cnt: count) -> @ast::expr {
 alt cnt {
 count_implied {
-ret make_rt_path_expr(cx, sp, @"count_implied");
+ret make_rt_path_expr(cx, sp, @"count_implied"/~);
 }
 count_is(c) {
 let count_lit = mk_int(cx, sp, c);
-let count_is_path = make_path_vec(cx, @"count_is");
+let count_is_path = make_path_vec(cx, @"count_is"/~);
 let count_is_args = ~[count_lit];
 ret mk_call(cx, sp, count_is_path, count_is_args);
 }
@@ -97,10 +97,10 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
 width_expr: @ast::expr, precision_expr: @ast::expr,
 ty_expr: @ast::expr) -> @ast::expr {
 ret mk_rec_e(cx, sp,
-~[{ident: @"flags", ex: flags_expr},
+~[{ident: @"flags"/~, ex: flags_expr},
-{ident: @"width", ex: width_expr},
+{ident: @"width"/~, ex: width_expr},
-{ident: @"precision", ex: precision_expr},
+{ident: @"precision"/~, ex: precision_expr},
-{ident: @"ty", ex: ty_expr}]);
+{ident: @"ty"/~, ex: ty_expr}]);
 }
 let rt_conv_flags = make_flags(cx, sp, cnv.flags);
 let rt_conv_width = make_count(cx, sp, cnv.width);
@@ -275,7 +275,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
 }

 let arg_vec = mk_fixed_vec_e(cx, fmt_sp, piece_exprs);
-ret mk_call(cx, fmt_sp, ~[@"str", @"concat"], ~[arg_vec]);
+ret mk_call(cx, fmt_sp, ~[@"str"/~, @"concat"/~], ~[arg_vec]);
 }
 //
 // Local Variables:
@@ -28,8 +28,8 @@ impl proto_parser for parser {
 };
 self.bump();
 let dir = alt dir {
-@"send" { send }
+@"send"/~ { send }
-@"recv" { recv }
+@"recv"/~ { recv }
 _ { fail }
 };

@@ -75,7 +75,7 @@ impl methods for message {
 );

 let args_ast = vec::append(
-~[cx.arg_mode(@"pipe",
+~[cx.arg_mode(@"pipe"/~,
 cx.ty_path(path(this.data_name())
 .add_tys(cx.ty_vars(this.ty_params))),
 ast::by_copy)],
@@ -92,7 +92,7 @@ impl methods for message {
 body += #fmt("let message = %s::%s(%s);\n",
 *this.proto.name,
 *self.name(),
-str::connect(vec::append_one(arg_names, @"s")
+str::connect(vec::append_one(arg_names, @"s"/~)
 .map(|x| *x),
 ", "));
 body += #fmt("pipes::send(pipe, message);\n");
@@ -158,8 +158,8 @@ impl methods for state {
 let next_name = next.data_name();

 let dir = alt this.dir {
-send { @"server" }
+send { @"server"/~ }
-recv { @"client" }
+recv { @"client"/~ }
 };

 let v = cx.variant(name,
@@ -190,7 +190,7 @@ impl methods for state {
 cx.item_ty_poly(
 self.data_name(),
 cx.ty_path(
-(@"pipes" + @(dir.to_str() + "_packet"))
+(@"pipes"/~ + @(dir.to_str() + "_packet"/~))
 .add_ty(cx.ty_path(
 (self.proto.name + self.data_name())
 .add_tys(cx.ty_vars(self.ty_params))))),
@@ -281,10 +281,10 @@ impl methods for protocol {
 }

 vec::push(items,
-cx.item_mod(@"client",
+cx.item_mod(@"client"/~,
 client_states));
 vec::push(items,
-cx.item_mod(@"server",
+cx.item_mod(@"server"/~,
 server_states));

 cx.item_mod(self.name, items)
@@ -35,7 +35,8 @@ impl of qq_helper for @ast::crate {
 fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_crate(*self, cx, v);}
 fn extract_mac() -> option<ast::mac_> {fail}
 fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
-mk_path(cx, sp, ~[@"syntax", @"ext", @"qquote", @"parse_crate"])
+mk_path(cx, sp,
+~[@"syntax"/~, @"ext"/~, @"qquote"/~, @"parse_crate"/~])
 }
 fn get_fold_fn() -> str {"fold_crate"}
 }
@@ -49,7 +50,8 @@ impl of qq_helper for @ast::expr {
 }
 }
 fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
-mk_path(cx, sp, ~[@"syntax", @"ext", @"qquote", @"parse_expr"])
+mk_path(cx, sp,
+~[@"syntax"/~, @"ext"/~, @"qquote"/~, @"parse_expr"/~])
 }
 fn get_fold_fn() -> str {"fold_expr"}
 }
@@ -63,7 +65,8 @@ impl of qq_helper for @ast::ty {
 }
 }
 fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
-mk_path(cx, sp, ~[@"syntax", @"ext", @"qquote", @"parse_ty"])
+mk_path(cx, sp,
+~[@"syntax"/~, @"ext"/~, @"qquote"/~, @"parse_ty"/~])
 }
 fn get_fold_fn() -> str {"fold_ty"}
 }
@@ -72,7 +75,8 @@ impl of qq_helper for @ast::item {
 fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_item(self, cx, v);}
 fn extract_mac() -> option<ast::mac_> {fail}
 fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
-mk_path(cx, sp, ~[@"syntax", @"ext", @"qquote", @"parse_item"])
+mk_path(cx, sp,
+~[@"syntax"/~, @"ext"/~, @"qquote"/~, @"parse_item"/~])
 }
 fn get_fold_fn() -> str {"fold_item"}
 }
@@ -81,7 +85,8 @@ impl of qq_helper for @ast::stmt {
 fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_stmt(self, cx, v);}
 fn extract_mac() -> option<ast::mac_> {fail}
 fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
-mk_path(cx, sp, ~[@"syntax", @"ext", @"qquote", @"parse_stmt"])
+mk_path(cx, sp,
+~[@"syntax"/~, @"ext"/~, @"qquote"/~, @"parse_stmt"/~])
 }
 fn get_fold_fn() -> str {"fold_stmt"}
 }
@@ -90,7 +95,7 @@ impl of qq_helper for @ast::pat {
 fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_pat(self, cx, v);}
 fn extract_mac() -> option<ast::mac_> {fail}
 fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
-mk_path(cx, sp, ~[@"syntax", @"ext", @"qquote", @"parse_pat"])
+mk_path(cx, sp, ~[@"syntax"/~, @"ext"/~, @"qquote"/~, @"parse_pat"/~])
 }
 fn get_fold_fn() -> str {"fold_pat"}
 }
@@ -228,19 +233,19 @@ fn finish<T: qq_helper>
 let cx = ecx;

 let cfg_call = || mk_call_(
-cx, sp, mk_access(cx, sp, ~[@"ext_cx"], @"cfg"), ~[]);
+cx, sp, mk_access(cx, sp, ~[@"ext_cx"/~], @"cfg"/~), ~[]);

 let parse_sess_call = || mk_call_(
-cx, sp, mk_access(cx, sp, ~[@"ext_cx"], @"parse_sess"), ~[]);
+cx, sp, mk_access(cx, sp, ~[@"ext_cx"/~], @"parse_sess"/~), ~[]);

 let pcall = mk_call(cx,sp,
-~[@"syntax", @"parse", @"parser",
+~[@"syntax"/~, @"parse"/~, @"parser"/~,
-@"parse_from_source_str"],
+@"parse_from_source_str"/~],
 ~[node.mk_parse_fn(cx,sp),
 mk_str(cx,sp, fname),
 mk_call(cx,sp,
-~[@"syntax",@"ext",
+~[@"syntax"/~,@"ext"/~,
-@"qquote", @"mk_file_substr"],
+@"qquote"/~, @"mk_file_substr"/~],
 ~[mk_str(cx,sp, loc.file.name),
 mk_uint(cx,sp, loc.line),
 mk_uint(cx,sp, loc.col)]),
@@ -252,15 +257,15 @@ fn finish<T: qq_helper>
 let mut rcall = pcall;
 if (g_len > 0u) {
 rcall = mk_call(cx,sp,
-~[@"syntax", @"ext", @"qquote", @"replace"],
+~[@"syntax"/~, @"ext"/~, @"qquote"/~, @"replace"/~],
 ~[pcall,
 mk_uniq_vec_e(cx,sp, qcx.gather.map_to_vec(|g| {
 mk_call(cx,sp,
-~[@"syntax", @"ext",
+~[@"syntax"/~, @"ext"/~,
-@"qquote", @g.constr],
+@"qquote"/~, @g.constr],
 ~[g.e])})),
 mk_path(cx,sp,
-~[@"syntax", @"ext", @"qquote",
+~[@"syntax"/~, @"ext"/~, @"qquote"/~,
 @node.get_fold_fn()])]);
 }
 ret rcall;
@@ -18,9 +18,9 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,

 let argument_gram = ~[
 ms(mtc_rep(~[
-ms(mtc_bb(@"lhs",@"mtcs", 0u)),
+ms(mtc_bb(@"lhs"/~,@"mtcs"/~, 0u)),
 ms(mtc_tok(FAT_ARROW)),
-ms(mtc_bb(@"rhs",@"tt", 1u)),
+ms(mtc_bb(@"rhs"/~,@"tt"/~, 1u)),
 ], some(SEMI), false))];

 let arg_reader = new_tt_reader(cx.parse_sess().span_diagnostic,
@@ -31,11 +31,11 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
 failure(sp, msg) { cx.span_fatal(sp, msg); }
 };

-let lhses = alt arguments.get(@"lhs") {
+let lhses = alt arguments.get(@"lhs"/~) {
 @seq(s, sp) { s }
 _ { cx.span_bug(sp, "wrong-structured lhs") }
 };
-let rhses = alt arguments.get(@"rhs") {
+let rhses = alt arguments.get(@"rhs"/~) {
 @seq(s, sp) { s }
 _ { cx.span_bug(sp, "wrong-structured rhs") }
 };
@@ -259,7 +259,7 @@ class parser {
 let name = self.parse_value_ident();
 p.bump();
 name
-} else { @"" };
+} else { @""/~ };

 {mode: mode, ty: p.parse_ty(false), ident: name,
 id: p.get_id()}
@@ -1337,7 +1337,8 @@ class parser {
 hi = e.span.hi;
 // HACK: turn &[...] into a &-evec
 ex = alt e.node {
-expr_vec(*) if m == m_imm {
+expr_vec(*) | expr_lit(@{node: lit_str(_), span: _})
+if m == m_imm {
 expr_vstore(e, vstore_slice(self.region_from_name(none)))
 }
 _ { expr_addr_of(m, e) }
@@ -1353,7 +1354,8 @@ class parser {
 hi = e.span.hi;
 // HACK: turn @[...] into a @-evec
 ex = alt e.node {
-expr_vec(*) if m == m_imm { expr_vstore(e, vstore_box) }
+expr_vec(*) | expr_lit(@{node: lit_str(_), span: _})
+if m == m_imm { expr_vstore(e, vstore_box) }
 _ { expr_unary(box(m), e) }
 };
 }
@@ -1364,7 +1366,8 @@ class parser {
 hi = e.span.hi;
 // HACK: turn ~[...] into a ~-evec
 ex = alt e.node {
-expr_vec(*) if m == m_imm { expr_vstore(e, vstore_uniq) }
+expr_vec(*) | expr_lit(@{node: lit_str(_), span: _})
+if m == m_imm { expr_vstore(e, vstore_uniq) }
 _ { expr_unary(uniq(m), e) }
 };
 }
@@ -2134,12 +2137,16 @@ class parser {
 fn parse_method_name() -> ident {
 alt copy self.token {
 token::BINOP(op) { self.bump(); @token::binop_to_str(op) }
-token::NOT { self.bump(); @"!" }
+token::NOT { self.bump(); @"!"/~ }
-token::LBRACKET { self.bump(); self.expect(token::RBRACKET); @"[]" }
+token::LBRACKET {
+self.bump();
+self.expect(token::RBRACKET);
+@"[]"/~
+}
 _ {
 let id = self.parse_value_ident();
-if id == @"unary" && self.eat(token::BINOP(token::MINUS)) {
+if id == @"unary"/~ && self.eat(token::BINOP(token::MINUS)) {
-@"unary-"
+@"unary-"/~
 }
 else { id }
 }
@@ -28,8 +28,8 @@ fn name_of_fn(fk: fn_kind) -> ident {
 alt fk {
 fk_item_fn(name, _) | fk_method(name, _, _)
 | fk_ctor(name, _, _, _) { /* FIXME (#2543) */ copy name }
-fk_anon(*) | fk_fn_block(*) { @"anon" }
+fk_anon(*) | fk_fn_block(*) { @"anon"/~ }
-fk_dtor(*) { @"drop" }
+fk_dtor(*) { @"drop"/~ }
 }
 }
@@ -51,13 +51,13 @@ fn default_configuration(sess: session, argv0: str, input: input) ->

 ret ~[ // Target bindings.
 attr::mk_word_item(@os::family()),
-mk(@"target_os", os::sysname()),
+mk(@"target_os"/~, os::sysname()),
-mk(@"target_family", os::family()),
+mk(@"target_family"/~, os::family()),
-mk(@"target_arch", arch),
+mk(@"target_arch"/~, arch),
-mk(@"target_libc", libc),
+mk(@"target_libc"/~, libc),
 // Build bindings.
-mk(@"build_compiler", argv0),
+mk(@"build_compiler"/~, argv0),
-mk(@"build_input", source_name(input))];
+mk(@"build_input"/~, source_name(input))];
 }

 fn build_configuration(sess: session, argv0: str, input: input) ->
@@ -71,7 +71,7 @@ fn build_configuration(sess: session, argv0: str, input: input) ->
 {
 if sess.opts.test && !attr::contains_name(user_cfg, "test")
 {
-~[attr::mk_word_item(@"test")]
+~[attr::mk_word_item(@"test"/~)]
 } else { ~[] }
 };
 ret vec::append(vec::append(user_cfg, gen_cfg), default_cfg);
@@ -217,7 +217,7 @@ fn building_library(req_crate_type: crate_type, crate: @ast::crate,
 alt syntax::attr::first_attr_value_str_by_name(
 crate.node.attrs,
 "crate_type") {
-option::some(@"lib") { true }
+option::some(@"lib"/~) { true }
 _ { false }
 }
 }
@@ -245,7 +245,7 @@ mod test {
 style: ast::attr_outer,
 value: ast_util::respan(ast_util::dummy_sp(),
 ast::meta_name_value(
-@"crate_type",
+@"crate_type"/~,
 ast_util::respan(ast_util::dummy_sp(),
 ast::lit_str(@t)))),
 is_sugared_doc: false
@@ -30,11 +30,11 @@ fn inject_libcore_ref(sess: session,
 let n1 = sess.next_node_id();
 let n2 = sess.next_node_id();

-let vi1 = @{node: ast::view_item_use(@"core", ~[], n1),
+let vi1 = @{node: ast::view_item_use(@"core"/~, ~[], n1),
 attrs: ~[],
 vis: ast::public,
 span: dummy_sp()};
-let vp = spanned(ast::view_path_glob(ident_to_path(dummy_sp(), @"core"),
+let vp = spanned(ast::view_path_glob(ident_to_path(dummy_sp(), @"core"/~),
 n2));
 let vi2 = @{node: ast::view_item_import(~[vp]),
 attrs: ~[],
@@ -192,9 +192,9 @@ fn mk_test_module(cx: test_ctxt) -> @ast::item {
 let item_ = ast::item_mod(testmod);
 // This attribute tells resolve to let us call unexported functions
 let resolve_unexported_attr =
-attr::mk_attr(attr::mk_word_item(@"!resolve_unexported"));
+attr::mk_attr(attr::mk_word_item(@"!resolve_unexported"/~));
 let item: ast::item =
-{ident: @"__test",
+{ident: @"__test"/~,
 attrs: ~[resolve_unexported_attr],
 id: cx.sess.next_node_id(),
 node: item_,
@@ -233,7 +233,7 @@ fn mk_tests(cx: test_ctxt) -> @ast::item {

 let item_ = ast::item_fn(decl, ~[], body);
 let item: ast::item =
-{ident: @"tests",
+{ident: @"tests"/~,
 attrs: ~[],
 id: cx.sess.next_node_id(),
 node: item_,
@@ -248,17 +248,18 @@ fn mk_path(cx: test_ctxt, path: ~[ast::ident]) -> ~[ast::ident] {
 let is_std = {
 let items = attr::find_linkage_metas(cx.crate.node.attrs);
 alt attr::last_meta_item_value_str_by_name(items, "name") {
-some(@"std") { true }
+some(@"std"/~) { true }
 _ { false }
 }
 };
 if is_std { path }
-else { vec::append(~[@"std"], path) }
+else { vec::append(~[@"std"/~], path) }
 }

 // The ast::ty of ~[std::test::test_desc]
 fn mk_test_desc_vec_ty(cx: test_ctxt) -> @ast::ty {
-let test_desc_ty_path = path_node(mk_path(cx, ~[@"test", @"test_desc"]));
+let test_desc_ty_path =
+path_node(mk_path(cx, ~[@"test"/~, @"test_desc"/~]));

 let test_desc_ty: ast::ty =
 {id: cx.sess.next_node_id(),
@@ -307,7 +308,7 @@ fn mk_test_desc_rec(cx: test_ctxt, test: test) -> @ast::expr {
 span: span};

 let name_field: ast::field =
-nospan({mutbl: ast::m_imm, ident: @"name", expr: @name_expr});
+nospan({mutbl: ast::m_imm, ident: @"name"/~, expr: @name_expr});

 let fn_path = path_node(path);

@@ -320,7 +321,7 @@ fn mk_test_desc_rec(cx: test_ctxt, test: test) -> @ast::expr {
 let fn_wrapper_expr = mk_test_wrapper(cx, fn_expr, span);

 let fn_field: ast::field =
-nospan({mutbl: ast::m_imm, ident: @"fn", expr: fn_wrapper_expr});
+nospan({mutbl: ast::m_imm, ident: @"fn"/~, expr: fn_wrapper_expr});

 let ignore_lit: ast::lit = nospan(ast::lit_bool(test.ignore));

@@ -331,7 +332,7 @@ fn mk_test_desc_rec(cx: test_ctxt, test: test) -> @ast::expr {
 span: span};

 let ignore_field: ast::field =
-nospan({mutbl: ast::m_imm, ident: @"ignore", expr: @ignore_expr});
+nospan({mutbl: ast::m_imm, ident: @"ignore"/~, expr: @ignore_expr});

 let fail_lit: ast::lit = nospan(ast::lit_bool(test.should_fail));

@@ -342,7 +343,9 @@ fn mk_test_desc_rec(cx: test_ctxt, test: test) -> @ast::expr {
 span: span};

 let fail_field: ast::field =
-nospan({mutbl: ast::m_imm, ident: @"should_fail", expr: @fail_expr});
+nospan({mutbl: ast::m_imm,
+ident: @"should_fail"/~,
+expr: @fail_expr});

 let desc_rec_: ast::expr_ =
 ast::expr_rec(~[name_field, fn_field, ignore_field, fail_field],
@@ -397,7 +400,7 @@ fn mk_test_wrapper(cx: test_ctxt,
 }

 fn mk_main(cx: test_ctxt) -> @ast::item {
-let str_pt = path_node(~[@"str"]);
+let str_pt = path_node(~[@"str"/~]);
 let str_ty = @{id: cx.sess.next_node_id(),
 node: ast::ty_path(str_pt, cx.sess.next_node_id()),
 span: dummy_sp()};
@@ -413,7 +416,7 @@ fn mk_main(cx: test_ctxt) -> @ast::item {
 let args_arg: ast::arg =
 {mode: ast::expl(ast::by_val),
 ty: @args_ty,
-ident: @"args",
+ident: @"args"/~,
 id: cx.sess.next_node_id()};

 let ret_ty = {id: cx.sess.next_node_id(),
@@ -436,7 +439,7 @@ fn mk_main(cx: test_ctxt) -> @ast::item {

 let item_ = ast::item_fn(decl, ~[], body);
 let item: ast::item =
-{ident: @"main",
+{ident: @"main"/~,
 attrs: ~[],
 id: cx.sess.next_node_id(),
 node: item_,
@@ -448,7 +451,7 @@ fn mk_main(cx: test_ctxt) -> @ast::item {
 fn mk_test_main_call(cx: test_ctxt) -> @ast::expr {

 // Get the args passed to main so we can pass the to test_main
-let args_path = path_node(~[@"args"]);
+let args_path = path_node(~[@"args"/~]);

 let args_path_expr_: ast::expr_ = ast::expr_path(args_path);

@@ -457,7 +460,7 @@ fn mk_test_main_call(cx: test_ctxt) -> @ast::expr {
 node: args_path_expr_, span: dummy_sp()};

 // Call __test::test to generate the vector of test_descs
-let test_path = path_node(~[@"tests"]);
+let test_path = path_node(~[@"tests"/~]);

 let test_path_expr_: ast::expr_ = ast::expr_path(test_path);

@@ -472,7 +475,7 @@ fn mk_test_main_call(cx: test_ctxt) -> @ast::expr {
 node: test_call_expr_, span: dummy_sp()};

 // Call std::test::test_main
-let test_main_path = path_node(mk_path(cx, ~[@"test", @"test_main"]));
+let test_main_path = path_node(mk_path(cx, ~[@"test"/~, @"test_main"/~]));

 let test_main_path_expr_: ast::expr_ = ast::expr_path(test_main_path);
@@ -49,7 +49,7 @@ fn dump_crates(crate_cache: dvec<cache_entry>) {
 #debug("span: %?", entry.span);
 #debug("hash: %?", entry.hash);
 let attrs = ~[
-attr::mk_attr(attr::mk_list_item(@"link", *entry.metas))
+attr::mk_attr(attr::mk_list_item(@"link"/~, *entry.metas))
 ];
 for attr::find_linkage_attrs(attrs).each |attr| {
 #debug("meta: %s", pprust::attr_to_str(attr));
@@ -81,7 +81,7 @@ fn warn_if_multiple_versions(diag: span_handler,
 for matches.each |match| {
 diag.span_note(match.span, "used here");
 let attrs = ~[
-attr::mk_attr(attr::mk_list_item(@"link", *match.metas))
+attr::mk_attr(attr::mk_list_item(@"link"/~, *match.metas))
 ];
 loader::note_linkage_attrs(diag, attrs);
 }
@ -168,7 +168,7 @@ fn metas_with(ident: ast::ident, key: ast::ident,
|
||||||
|
|
||||||
fn metas_with_ident(ident: ast::ident,
|
fn metas_with_ident(ident: ast::ident,
|
||||||
metas: ~[@ast::meta_item]) -> ~[@ast::meta_item] {
|
metas: ~[@ast::meta_item]) -> ~[@ast::meta_item] {
|
||||||
metas_with(ident, @"name", metas)
|
metas_with(ident, @"name"/~, metas)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn existing_match(e: env, metas: ~[@ast::meta_item], hash: str) ->
|
fn existing_match(e: env, metas: ~[@ast::meta_item], hash: str) ->
|
||||||
|
@ -245,7 +245,7 @@ fn resolve_crate_deps(e: env, cdata: @~[u8]) -> cstore::cnum_map {
|
||||||
for decoder::get_crate_deps(cdata).each |dep| {
|
for decoder::get_crate_deps(cdata).each |dep| {
|
||||||
let extrn_cnum = dep.cnum;
|
let extrn_cnum = dep.cnum;
|
||||||
let cname = dep.name;
|
let cname = dep.name;
|
||||||
let cmetas = metas_with(dep.vers, @"vers", ~[]);
|
let cmetas = metas_with(dep.vers, @"vers"/~, ~[]);
|
||||||
#debug("resolving dep crate %s ver: %s hash: %s",
|
#debug("resolving dep crate %s ver: %s hash: %s",
|
||||||
*dep.name, *dep.vers, *dep.hash);
|
*dep.name, *dep.vers, *dep.hash);
|
||||||
alt existing_match(e, metas_with_ident(cname, cmetas), *dep.hash) {
|
alt existing_match(e, metas_with_ident(cname, cmetas), *dep.hash) {
|
||||||
@@ -833,7 +833,7 @@ fn get_crate_vers(data: @~[u8]) -> @str/~ {
 ret alt attr::last_meta_item_value_str_by_name(
 attr::find_linkage_metas(attrs), "vers") {
 some(ver) { ver }
-none { @"0.0" }
+none { @"0.0"/~ }
 };
 }

@@ -802,7 +802,7 @@ fn encode_info_for_items(ecx: @encode_ctxt, ebml_w: ebml::writer,
 ebml_w.start_tag(tag_items_data);
 vec::push(*index, {val: crate_node_id, pos: ebml_w.writer.tell()});
 encode_info_for_mod(ecx, ebml_w, crate.node.module,
-crate_node_id, ~[], @"");
+crate_node_id, ~[], @""/~);
 visit::visit_crate(*crate, (), visit::mk_vt(@{
 visit_expr: |_e, _cx, _v| { },
 visit_item: |i, cx, v, copy ebml_w| {
@@ -952,18 +952,18 @@ fn synthesize_crate_attrs(ecx: @encode_ctxt, crate: @crate) -> ~[attribute] {
 assert (*ecx.link_meta.vers != "");

 let name_item =
-attr::mk_name_value_item_str(@"name", *ecx.link_meta.name);
+attr::mk_name_value_item_str(@"name"/~, *ecx.link_meta.name);
 let vers_item =
-attr::mk_name_value_item_str(@"vers", *ecx.link_meta.vers);
+attr::mk_name_value_item_str(@"vers"/~, *ecx.link_meta.vers);

 let other_items =
 {
-let tmp = attr::remove_meta_items_by_name(items, @"name");
-attr::remove_meta_items_by_name(tmp, @"vers")
+let tmp = attr::remove_meta_items_by_name(items, @"name"/~);
+attr::remove_meta_items_by_name(tmp, @"vers"/~)
 };

 let meta_items = vec::append(~[name_item, vers_item], other_items);
-let link_item = attr::mk_list_item(@"link", meta_items);
+let link_item = attr::mk_list_item(@"link"/~, meta_items);

 ret attr::mk_attr(link_item);
 }
@@ -67,8 +67,8 @@ fn check_exhaustive(tcx: ty::ctxt, sp: span, pats: ~[@pat]) {
 alt ty::get(ty).struct {
 ty::ty_bool {
 alt check ctor {
-val(const_int(1i64)) { some(@"true") }
-val(const_int(0i64)) { some(@"false") }
+val(const_int(1i64)) { some(@"true"/~) }
+val(const_int(0i64)) { some(@"false"/~) }
 }
 }
 ty::ty_enum(id, _) {
@@ -458,7 +458,7 @@ fn check_item_old_vecs(cx: ty::ctxt, it: @ast::item) {
 }
 ast::ty_path(@{span: _, global: _, idents: ids,
 rp: none, types: _}, _)
-if ids == ~[@"str"] && (! uses_vstore.contains_key(t.id)) {
+if ids == ~[@"str"/~] && (! uses_vstore.contains_key(t.id)) {
 cx.sess.span_lint(
 old_strs, t.id, it.id,
 t.span, "deprecated str type");
@@ -290,8 +290,8 @@ class ir_maps {
 alt self.var_kinds[*var] {
 vk_local(_, name) | vk_arg(_, name, _) {name}
 vk_field(name) {@("self." + *name)}
-vk_self {@"self"}
-vk_implicit_ret {@"<implicit-ret>"}
+vk_self {@"self"/~}
+vk_implicit_ret {@"<implicit-ret>"/~}
 }
 }

@@ -505,7 +505,7 @@ impl methods for determine_rp_ctxt {
 fn region_is_relevant(r: @ast::region) -> bool {
 alt r.node {
 ast::re_anon {self.anon_implies_rp}
-ast::re_named(@"self") {true}
+ast::re_named(@"self"/~) {true}
 ast::re_named(_) {false}
 }
 }
@@ -885,7 +885,7 @@ fn resolve_import(e: env, n_id: node_id, name: ast::ident,
 // import
 alt e.imports.find(n_id) {
 some(resolving(sp)) {
-e.imports.insert(n_id, resolved(none, none, none, @~[], @"", sp));
+e.imports.insert(n_id, resolved(none, none, none, @~[], @""/~, sp));
 }
 _ { }
 }
@@ -560,22 +560,22 @@ class PrimitiveTypeTable {
 new(atom_table: @AtomTable) {
 self.primitive_types = atom_hashmap();

-self.intern(atom_table, @"bool", ty_bool);
-self.intern(atom_table, @"char", ty_int(ty_char));
-self.intern(atom_table, @"float", ty_float(ty_f));
-self.intern(atom_table, @"f32", ty_float(ty_f32));
-self.intern(atom_table, @"f64", ty_float(ty_f64));
-self.intern(atom_table, @"int", ty_int(ty_i));
-self.intern(atom_table, @"i8", ty_int(ty_i8));
-self.intern(atom_table, @"i16", ty_int(ty_i16));
-self.intern(atom_table, @"i32", ty_int(ty_i32));
-self.intern(atom_table, @"i64", ty_int(ty_i64));
-self.intern(atom_table, @"str", ty_str);
-self.intern(atom_table, @"uint", ty_uint(ty_u));
-self.intern(atom_table, @"u8", ty_uint(ty_u8));
-self.intern(atom_table, @"u16", ty_uint(ty_u16));
-self.intern(atom_table, @"u32", ty_uint(ty_u32));
-self.intern(atom_table, @"u64", ty_uint(ty_u64));
+self.intern(atom_table, @"bool"/~, ty_bool);
+self.intern(atom_table, @"char"/~, ty_int(ty_char));
+self.intern(atom_table, @"float"/~, ty_float(ty_f));
+self.intern(atom_table, @"f32"/~, ty_float(ty_f32));
+self.intern(atom_table, @"f64"/~, ty_float(ty_f64));
+self.intern(atom_table, @"int"/~, ty_int(ty_i));
+self.intern(atom_table, @"i8"/~, ty_int(ty_i8));
+self.intern(atom_table, @"i16"/~, ty_int(ty_i16));
+self.intern(atom_table, @"i32"/~, ty_int(ty_i32));
+self.intern(atom_table, @"i64"/~, ty_int(ty_i64));
+self.intern(atom_table, @"str"/~, ty_str);
+self.intern(atom_table, @"uint"/~, ty_uint(ty_u));
+self.intern(atom_table, @"u8"/~, ty_uint(ty_u8));
+self.intern(atom_table, @"u16"/~, ty_uint(ty_u16));
+self.intern(atom_table, @"u32"/~, ty_uint(ty_u32));
+self.intern(atom_table, @"u64"/~, ty_uint(ty_u64));
 }

 fn intern(atom_table: @AtomTable, string: @str/~,
@@ -651,7 +651,7 @@ class Resolver {
 self.type_ribs = @dvec();
 self.xray_context = NoXray;

-self.self_atom = (*self.atom_table).intern(@"self");
+self.self_atom = (*self.atom_table).intern(@"self"/~);
 self.primitive_type_table = @PrimitiveTypeTable(self.atom_table);

 self.namespaces = ~[ ModuleNS, TypeNS, ValueNS, ImplNS ];
@@ -4234,7 +4234,7 @@ class Resolver {
 current_module = module;
 }
 BlockParentLink(module, node_id) {
-atoms.push((*self.atom_table).intern(@"<opaque>"));
+atoms.push((*self.atom_table).intern(@"<opaque>"/~));
 current_module = module;
 }
 }
@@ -486,8 +486,8 @@ fn declare_tydesc(ccx: @crate_ctxt, t: ty::t) -> @tydesc_info {
 let llalign = llalign_of(ccx, llty);
 //XXX this triggers duplicate LLVM symbols
 let name = if false /*ccx.sess.opts.debuginfo*/ {
-mangle_internal_name_by_type_only(ccx, t, @"tydesc")
-} else { mangle_internal_name_by_seq(ccx, @"tydesc") };
+mangle_internal_name_by_type_only(ccx, t, @"tydesc"/~)
+} else { mangle_internal_name_by_seq(ccx, @"tydesc"/~) };
 note_unique_llvm_symbol(ccx, name);
 log(debug, #fmt("+++ declare_tydesc %s %s", ty_to_str(ccx.tcx, t), name));
 let gvar = str::as_c_str(name, |buf| {
@@ -667,8 +667,8 @@ fn incr_refcnt_of_boxed(cx: block, box_ptr: ValueRef) {
 fn make_visit_glue(bcx: block, v: ValueRef, t: ty::t) {
 let _icx = bcx.insn_ctxt("make_visit_glue");
 let mut bcx = bcx;
-assert bcx.ccx().tcx.intrinsic_defs.contains_key(@"ty_visitor");
-let (iid, ty) = bcx.ccx().tcx.intrinsic_defs.get(@"ty_visitor");
+assert bcx.ccx().tcx.intrinsic_defs.contains_key(@"ty_visitor"/~);
+let (iid, ty) = bcx.ccx().tcx.intrinsic_defs.get(@"ty_visitor"/~);
 let v = PointerCast(bcx, v, T_ptr(type_of::type_of(bcx.ccx(), ty)));
 bcx = reflect::emit_calls_to_trait_visit_ty(bcx, t, v, iid);
 build_return(bcx);
@@ -2131,7 +2131,7 @@ fn monomorphic_fn(ccx: @crate_ctxt, fn_id: ast::def_id,
 must_cast: true};
 }
 ast_map::node_ctor(nm, _, ct, _, pt) { (pt, nm, ct.span) }
-ast_map::node_dtor(_, dtor, _, pt) {(pt, @"drop", dtor.span)}
+ast_map::node_dtor(_, dtor, _, pt) {(pt, @"drop"/~, dtor.span)}
 ast_map::node_expr(*) { ccx.tcx.sess.bug("Can't monomorphize an expr") }
 ast_map::node_export(*) {
 ccx.tcx.sess.bug("Can't monomorphize an export")
@@ -3825,7 +3825,7 @@ fn trans_log(log_ex: @ast::expr, lvl: @ast::expr,
 ccx.module_data.get(modname)
 } else {
 let s = link::mangle_internal_name_by_path_and_seq(
-ccx, modpath, @"loglevel");
+ccx, modpath, @"loglevel"/~);
 let global = str::as_c_str(s, |buf| {
 llvm::LLVMAddGlobal(ccx.llmod, T_i32(), buf)
 });
@@ -4563,7 +4563,7 @@ fn trans_enum_variant(ccx: @crate_ctxt, enum_id: ast::node_id,
 let fn_args = vec::map(variant.node.args, |varg|
 {mode: ast::expl(ast::by_copy),
 ty: varg.ty,
-ident: @"arg",
+ident: @"arg"/~,
 id: varg.id});
 let fcx = new_fn_ctxt_w_id(ccx, ~[], llfndecl, variant.node.id,
 param_substs, none);
@@ -5215,7 +5215,7 @@ fn trans_constant(ccx: @crate_ctxt, it: @ast::item) {
 let path = item_path(ccx, it);
 for vec::each(variants) |variant| {
 let p = vec::append(path, ~[path_name(variant.node.name),
-path_name(@"discrim")]);
+path_name(@"discrim"/~)]);
 let s = mangle_exported_name(ccx, p, ty::mk_int(ccx.tcx));
 let disr_val = vi[i].disr_val;
 note_unique_llvm_symbol(ccx, s);
@@ -365,7 +365,7 @@ fn trans_expr_fn(bcx: block,
 let ccx = bcx.ccx(), bcx = bcx;
 let fty = node_id_type(bcx, id);
 let llfnty = type_of_fn_from_ty(ccx, fty);
-let sub_path = vec::append_one(bcx.fcx.path, path_name(@"anon"));
+let sub_path = vec::append_one(bcx.fcx.path, path_name(@"anon"/~));
 let s = mangle_internal_name_by_path(ccx, sub_path);
 let llfn = decl_internal_cdecl_fn(ccx.llmod, s, llfnty);

@@ -1001,7 +1001,7 @@ fn trans_foreign_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl,
 let _icx = ccx.insn_ctxt("foreign::foreign::build_rust_fn");
 let t = ty::node_id_to_type(ccx.tcx, id);
 let ps = link::mangle_internal_name_by_path(
-ccx, vec::append_one(path, ast_map::path_name(@"__rust_abi")));
+ccx, vec::append_one(path, ast_map::path_name(@"__rust_abi"/~)));
 let llty = type_of_fn_from_ty(ccx, t);
 let llfndecl = decl_internal_cdecl_fn(ccx.llmod, ps, llty);
 trans_fn(ccx, path, decl, body, llfndecl, no_self, none, id);
@@ -1039,7 +1039,7 @@ fn trans_foreign_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl,

 let shim_name = link::mangle_internal_name_by_path(
 ccx, vec::append_one(path,
-ast_map::path_name(@"__rust_stack_shim")));
+ast_map::path_name(@"__rust_stack_shim"/~)));
 ret build_shim_fn_(ccx, shim_name, llrustfn, tys,
 lib::llvm::CCallConv,
 build_args, build_ret);
@@ -297,8 +297,8 @@ fn emit_calls_to_trait_visit_ty(bcx: block, t: ty::t,
 visitor_iid: def_id) -> block {

 let final = sub_block(bcx, "final");
-assert bcx.ccx().tcx.intrinsic_defs.contains_key(@"tydesc");
-let (_, tydesc_ty) = bcx.ccx().tcx.intrinsic_defs.get(@"tydesc");
+assert bcx.ccx().tcx.intrinsic_defs.contains_key(@"tydesc"/~);
+let (_, tydesc_ty) = bcx.ccx().tcx.intrinsic_defs.get(@"tydesc"/~);
 let tydesc_ty = type_of::type_of(bcx.ccx(), tydesc_ty);
 let r = reflector({
 visitor_val: visitor_val,
@@ -730,7 +730,7 @@ fn simplify_type(tcx: ty::ctxt, typ: ty::t) -> ty::t {
 ty::ty_class(did, substs) {
 let simpl_fields = (if is_some(ty::ty_dtor(tcx, did)) {
 // remember the drop flag
-~[{ident: @"drop", mt: {ty:
+~[{ident: @"drop"/~, mt: {ty:
 ty::mk_u8(tcx),
 mutbl: ast::m_mutbl}}] }
 else { ~[] }) +
@@ -2661,7 +2661,7 @@ fn item_path(cx: ctxt, id: ast::def_id) -> ast_map::path {
 vec::append_one(*path, ast_map::path_name(nm))
 }
 ast_map::node_dtor(_, _, _, path) {
-vec::append_one(*path, ast_map::path_name(@"dtor"))
+vec::append_one(*path, ast_map::path_name(@"dtor"/~))
 }


@@ -1625,7 +1625,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt,
 let p_ty = fcx.expr_ty(p);

 let lkup = method::lookup(fcx, p, p, expr.id, alloc_id,
-@"alloc", p_ty, ~[], false);
+@"alloc"/~, p_ty, ~[], false);
 alt lkup.method() {
 some(entry) {
 fcx.ccx.method_map.insert(alloc_id, entry);
@@ -2282,10 +2282,10 @@ fn check_intrinsic_type(ccx: @crate_ctxt, it: @ast::foreign_item) {
 (1u, ~[], ty::mk_nil_ptr(tcx))
 }
 "visit_tydesc" {
-assert ccx.tcx.intrinsic_defs.contains_key(@"tydesc");
-assert ccx.tcx.intrinsic_defs.contains_key(@"ty_visitor");
-let (_, tydesc_ty) = ccx.tcx.intrinsic_defs.get(@"tydesc");
-let (_, visitor_trait) = ccx.tcx.intrinsic_defs.get(@"ty_visitor");
+assert ccx.tcx.intrinsic_defs.contains_key(@"tydesc"/~);
+assert ccx.tcx.intrinsic_defs.contains_key(@"ty_visitor"/~);
+let (_, tydesc_ty) = ccx.tcx.intrinsic_defs.get(@"tydesc"/~);
+let (_, visitor_trait) = ccx.tcx.intrinsic_defs.get(@"ty_visitor"/~);
 let td_ptr = ty::mk_ptr(ccx.tcx, {ty: tydesc_ty,
 mutbl: ast::m_imm});
 (0u, ~[arg(ast::by_val, td_ptr),
@@ -74,7 +74,7 @@ fn local_rhs_span(l: @ast::local, def: span) -> span {
 fn is_main_name(path: syntax::ast_map::path) -> bool {
 // FIXME (#34): path should be a constrained type, so we know
 // the call to last doesn't fail.
-vec::last(path) == syntax::ast_map::path_name(@"main")
+vec::last(path) == syntax::ast_map::path_name(@"main"/~)
 }

 //
@@ -116,7 +116,7 @@ fn pandoc_header_id(header: str) -> str {
 let s = str::replace(s, "]", "");
 let s = str::replace(s, "(", "");
 let s = str::replace(s, ")", "");
-let s = str::replace(s, "@", "");
+let s = str::replace(s, "@", "/~"/~);
 let s = str::replace(s, "~", "");
 let s = str::replace(s, "/", "");
 let s = str::replace(s, ":", "");
@@ -5,7 +5,7 @@ fn failfn() {
 }

 fn main() {
-let x = @"hi";
+let x = @"hi"/~;
 failfn();
 log(error, x);
 }
@@ -2,7 +2,7 @@

 fn main() {
 let cheese = "roquefort";
-let carrots = @"crunchy";
+let carrots = @"crunchy"/~;

 fn@(tasties: @str/~, macerate: fn(str)) {
 macerate(*tasties);
@@ -18,8 +18,8 @@ fn main() {
 let map = map::hashmap(hash, eq);
 let mut arr = ~[];
 for uint::range(0u, 10u) |i| {
-arr += ~[@"key stuff"];
-map.insert(arr, arr + ~[@"value stuff"]);
+arr += ~[@"key stuff"/~];
+map.insert(arr, arr + ~[@"value stuff"/~]);
 }
-map.insert(~[@"boom"], ~[]);
+map.insert(~[@"boom"/~], ~[]);
 }
@@ -2,6 +2,6 @@

 fn main() {
 assert (@1 < @3);
-assert (@@"hello " > @@"hello");
-assert (@@@"hello" != @@@"there");
+assert (@@"hello "/~ > @@"hello"/~);
+assert (@@@"hello"/~ != @@@"there"/~);
 }
@@ -10,7 +10,7 @@ import std::map::str_hash;
 import dvec;

 fn main() {
-let v = ~[mut @"hi"];
+let v = ~[mut @"hi"/~];
 let m: req::header_map = str_hash();
 m.insert("METHOD", @dvec::from_vec(v));
 request::<int>(m);
@@ -6,7 +6,7 @@ fn perform_hax<T>(x: @T) -> hax {
 }

 fn deadcode() {
-perform_hax(@"deadcode");
+perform_hax(@"deadcode"/~);
 }

 fn main() {
@@ -6,7 +6,7 @@ fn perform_hax<T>(x: @T) -> hax {
 }

 fn deadcode() {
-perform_hax(@"deadcode");
+perform_hax(@"deadcode"/~);
 }

 fn main() {