librustc: Don't accept `as Trait` anymore; fix all occurrences of it.

parent 24a0de4e7f
commit b1c699815d
101 changed files with 676 additions and 538 deletions
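To illustrate the rule this commit enforces, here is a minimal before/after sketch in the pre-1.0 Rust of this era, reusing the `Shape`/`mycircle` names from the documentation hunk below; it is illustrative only and not part of the diff itself.

```rust
// Rejected after this commit: casting to a bare trait name. The type checker
// now reports: "a sigil (`@`, `~`, or `&`) must be specified when casting to
// a trait" (see the typeck hunk later in this diff).
// let myshape = @mycircle as Shape;

// Accepted: the trait-object sigil is spelled out on the type and the cast.
let myshape: @Shape = @mycircle as @Shape;
```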
@@ -1227,7 +1227,7 @@ to pointers to the trait name, used as a type.
 # impl Shape for int { }
 # let mycircle = 0;

-let myshape: Shape = @mycircle as @Shape;
+let myshape: @Shape = @mycircle as @Shape;
 ~~~~

 The resulting value is a managed box containing the value that was cast,
@@ -17,6 +17,7 @@ Simple compression
 use libc;
 use libc::{c_void, size_t, c_int};
 use ptr;
+use rand::RngUtil;
 use vec;

 #[cfg(test)] use rand;
@@ -20,6 +20,7 @@ pub mod linear {
 use hash::Hash;
 use iter;
 use option::{None, Option, Some};
+use rand::RngUtil;
 use rand;
 use uint;
 use vec;
@@ -785,8 +785,7 @@ pub fn fd_writer(fd: fd_t, cleanup: bool) -> @Writer {


 pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
-    -> Result<Writer, ~str> {
+    -> Result<@Writer, ~str> {

     #[cfg(windows)]
     fn wb() -> c_int {
         (O_WRONLY | libc::consts::os::extra::O_BINARY) as c_int
@@ -1079,22 +1078,24 @@ impl<T:Writer> WriterUtil for T {
 }

 #[allow(non_implicitly_copyable_typarams)]
-pub fn file_writer(path: &Path, flags: &[FileFlag]) -> Result<Writer, ~str> {
+pub fn file_writer(path: &Path, flags: &[FileFlag]) -> Result<@Writer, ~str> {
     mk_file_writer(path, flags).chain(|w| result::Ok(w))
 }


 // FIXME: fileflags // #2004
-pub fn buffered_file_writer(path: &Path) -> Result<Writer, ~str> {
+pub fn buffered_file_writer(path: &Path) -> Result<@Writer, ~str> {
     unsafe {
         let f = do os::as_c_charp(path.to_str()) |pathbuf| {
             do os::as_c_charp("w") |modebuf| {
                 libc::fopen(pathbuf, modebuf)
             }
         };
-        return if f as uint == 0u { result::Err(~"error opening "
-                                                + path.to_str()) }
-        else { result::Ok(FILE_writer(f, true)) }
+        return if f as uint == 0u {
+            result::Err(~"error opening " + path.to_str())
+        } else {
+            result::Ok(FILE_writer(f, true))
+        }
     }
 }

@@ -1142,14 +1143,14 @@ pub pure fn BytesWriter() -> BytesWriter {
     BytesWriter { bytes: ~[], mut pos: 0u }
 }

-pub pure fn with_bytes_writer(f: &fn(Writer)) -> ~[u8] {
+pub pure fn with_bytes_writer(f: &fn(@Writer)) -> ~[u8] {
     let wr = @BytesWriter();
-    f(wr as Writer);
+    f(wr as @Writer);
     let @BytesWriter{bytes, _} = wr;
     return bytes;
 }

-pub pure fn with_str_writer(f: &fn(Writer)) -> ~str {
+pub pure fn with_str_writer(f: &fn(@Writer)) -> ~str {
     let mut v = with_bytes_writer(f);

     // FIXME (#3758): This should not be needed.
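For context, a small usage sketch of the updated `with_str_writer` signature shown above (pre-1.0 Rust; the string literals are invented for illustration): the closure now explicitly receives a `@Writer` trait object.

```rust
let s: ~str = do io::with_str_writer |wr| {
    // `wr` is the @Writer created from the internal @BytesWriter
    wr.write_str("hello, ");
    wr.write_str("world");
};
```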
@@ -1277,8 +1278,8 @@ pub mod fsync {
     pub trait FSyncable { fn fsync(&self, l: Level) -> int; }

     // Call o.fsync after executing blk
-    pub fn obj_sync(o: FSyncable, opt_level: Option<Level>,
-                    blk: &fn(v: Res<FSyncable>)) {
+    pub fn obj_sync(o: @FSyncable, opt_level: Option<Level>,
+                    blk: &fn(v: Res<@FSyncable>)) {
         blk(Res(Arg {
             val: o, opt_level: opt_level,
             fsync_fn: |o, l| o.fsync(l)
@@ -1305,12 +1306,12 @@ mod tests {
             ~"A hoopy frood who really knows where his towel is.";
         debug!(copy frood);
         {
-            let out: io::Writer =
+            let out: @io::Writer =
                 result::get(
                     &io::file_writer(tmpfile, ~[io::Create, io::Truncate]));
             out.write_str(frood);
         }
-        let inp: io::Reader = result::get(&io::file_reader(tmpfile));
+        let inp: @io::Reader = result::get(&io::file_reader(tmpfile));
         let frood2: ~str = inp.read_c_str();
         debug!(copy frood2);
         fail_unless!(frood == frood2);
@@ -1265,6 +1265,7 @@ mod tests {
 use os::{remove_file, setenv};
 use os;
 use path::Path;
+use rand::RngUtil;
 use rand;
 use run;
 use str;
@@ -1282,7 +1283,7 @@ mod tests {
 }

 fn make_rand_name() -> ~str {
-    let rng: rand::Rng = rand::Rng();
+    let rng: @rand::Rng = rand::Rng();
     let n = ~"TEST" + rng.gen_str(10u);
     fail_unless!(getenv(n).is_none());
     n
@@ -22,97 +22,100 @@ use libc::size_t;

 /// A type that can be randomly generated using an RNG
 pub trait Rand {
-    static fn rand(rng: rand::Rng) -> Self;
+    static fn rand(rng: @rand::Rng) -> Self;
 }

 impl Rand for int {
-    static fn rand(rng: rand::Rng) -> int {
+    static fn rand(rng: @rand::Rng) -> int {
         rng.gen_int()
     }
 }

 impl Rand for i8 {
-    static fn rand(rng: rand::Rng) -> i8 {
+    static fn rand(rng: @rand::Rng) -> i8 {
         rng.gen_i8()
     }
 }

 impl Rand for i16 {
-    static fn rand(rng: rand::Rng) -> i16 {
+    static fn rand(rng: @rand::Rng) -> i16 {
         rng.gen_i16()
     }
 }

 impl Rand for i32 {
-    static fn rand(rng: rand::Rng) -> i32 {
+    static fn rand(rng: @rand::Rng) -> i32 {
         rng.gen_i32()
     }
 }

 impl Rand for i64 {
-    static fn rand(rng: rand::Rng) -> i64 {
+    static fn rand(rng: @rand::Rng) -> i64 {
         rng.gen_i64()
     }
 }

 impl Rand for u8 {
-    static fn rand(rng: rand::Rng) -> u8 {
+    static fn rand(rng: @rand::Rng) -> u8 {
         rng.gen_u8()
     }
 }

 impl Rand for u16 {
-    static fn rand(rng: rand::Rng) -> u16 {
+    static fn rand(rng: @rand::Rng) -> u16 {
         rng.gen_u16()
     }
 }

 impl Rand for u32 {
-    static fn rand(rng: rand::Rng) -> u32 {
+    static fn rand(rng: @rand::Rng) -> u32 {
         rng.gen_u32()
     }
 }

 impl Rand for u64 {
-    static fn rand(rng: rand::Rng) -> u64 {
+    static fn rand(rng: @rand::Rng) -> u64 {
         rng.gen_u64()
     }
 }

 impl Rand for float {
-    static fn rand(rng: rand::Rng) -> float {
+    static fn rand(rng: @rand::Rng) -> float {
         rng.gen_float()
     }
 }

 impl Rand for f32 {
-    static fn rand(rng: rand::Rng) -> f32 {
+    static fn rand(rng: @rand::Rng) -> f32 {
         rng.gen_f32()
     }
 }

 impl Rand for f64 {
-    static fn rand(rng: rand::Rng) -> f64 {
+    static fn rand(rng: @rand::Rng) -> f64 {
         rng.gen_f64()
     }
 }

 impl Rand for char {
-    static fn rand(rng: rand::Rng) -> char {
+    static fn rand(rng: @rand::Rng) -> char {
         rng.gen_char()
     }
 }

 impl Rand for bool {
-    static fn rand(rng: rand::Rng) -> bool {
+    static fn rand(rng: @rand::Rng) -> bool {
         rng.gen_bool()
     }
 }

 impl<T:Rand> Rand for Option<T> {
-    static fn rand(rng: rand::Rng) -> Option<T> {
-        if rng.gen_bool() { Some(Rand::rand(rng)) }
-        else { None }
+    static fn rand(rng: @rand::Rng) -> Option<T> {
+        if rng.gen_bool() {
+            Some(Rand::rand(rng))
+        } else {
+            None
+        }
     }
 }

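The pattern above extends to user-defined types. A hedged sketch with a hypothetical `Point` struct (not part of this commit) shows how an implementation looks once the generator is handed around as a `@rand::Rng` trait object.

```rust
struct Point { x: int, y: int }

impl Rand for Point {
    // The generator is now passed as an explicit @Rng trait object.
    static fn rand(rng: @rand::Rng) -> Point {
        Point { x: rng.gen_int(), y: rng.gen_int() }
    }
}
```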
@@ -145,8 +148,83 @@ pub struct Weighted<T> {
     item: T,
 }

+pub trait RngUtil {
+    fn gen<T:Rand>(&self) -> T;
+    /// Return a random int
+    fn gen_int(&self) -> int;
+    fn gen_int_range(&self, start: int, end: int) -> int;
+    /// Return a random i8
+    fn gen_i8(&self) -> i8;
+    /// Return a random i16
+    fn gen_i16(&self) -> i16;
+    /// Return a random i32
+    fn gen_i32(&self) -> i32;
+    /// Return a random i64
+    fn gen_i64(&self) -> i64;
+    /// Return a random uint
+    fn gen_uint(&self) -> uint;
+    /**
+     * Return a uint randomly chosen from the range [start, end),
+     * failing if start >= end
+     */
+    fn gen_uint_range(&self, start: uint, end: uint) -> uint;
+    /// Return a random u8
+    fn gen_u8(&self) -> u8;
+    /// Return a random u16
+    fn gen_u16(&self) -> u16;
+    /// Return a random u32
+    fn gen_u32(&self) -> u32;
+    /// Return a random u64
+    fn gen_u64(&self) -> u64;
+    /// Return a random float in the interval [0,1]
+    fn gen_float(&self) -> float;
+    /// Return a random f32 in the interval [0,1]
+    fn gen_f32(&self) -> f32;
+    /// Return a random f64 in the interval [0,1]
+    fn gen_f64(&self) -> f64;
+    /// Return a random char
+    fn gen_char(&self) -> char;
+    /**
+     * Return a char randomly chosen from chars, failing if chars is empty
+     */
+    fn gen_char_from(&self, chars: &str) -> char;
+    /// Return a random bool
+    fn gen_bool(&self) -> bool;
+    /// Return a bool with a 1 in n chance of true
+    fn gen_weighted_bool(&self, n: uint) -> bool;
+    /**
+     * Return a random string of the specified length composed of A-Z,a-z,0-9
+     */
+    fn gen_str(&self, len: uint) -> ~str;
+    /// Return a random byte string of the specified length
+    fn gen_bytes(&self, len: uint) -> ~[u8];
+    /// Choose an item randomly, failing if values is empty
+    fn choose<T:Copy>(&self, values: &[T]) -> T;
+    /// Choose Some(item) randomly, returning None if values is empty
+    fn choose_option<T:Copy>(&self, values: &[T]) -> Option<T>;
+    /**
+     * Choose an item respecting the relative weights, failing if the sum of
+     * the weights is 0
+     */
+    fn choose_weighted<T:Copy>(&self, v : &[Weighted<T>]) -> T;
+    /**
+     * Choose Some(item) respecting the relative weights, returning none if
+     * the sum of the weights is 0
+     */
+    fn choose_weighted_option<T:Copy>(&self, v: &[Weighted<T>]) -> Option<T>;
+    /**
+     * Return a vec containing copies of the items, in order, where
+     * the weight of the item determines how many copies there are
+     */
+    fn weighted_vec<T:Copy>(&self, v: &[Weighted<T>]) -> ~[T];
+    /// Shuffle a vec
+    fn shuffle<T:Copy>(&self, values: &[T]) -> ~[T];
+    /// Shuffle a mutable vec in place
+    fn shuffle_mut<T>(&self, values: &mut [T]);
+}

 /// Extension methods for random number generators
-pub impl Rng {
+impl RngUtil for @Rng {
     /// Return a random value for a Rand type
     fn gen<T:Rand>(&self) -> T {
         Rand::rand(*self)
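A brief usage sketch of the new `RngUtil` extension trait (pre-1.0 Rust, assuming only the methods listed above): callers must bring `use rand::RngUtil;` into scope, which is exactly what the import hunks earlier in this commit add.

```rust
use rand::RngUtil;

fn pick_name() -> ~str {
    let rng: @rand::Rng = rand::Rng();
    let n = rng.gen_uint_range(1u, 10u);   // uint in [1, 10)
    let suffix = rng.gen_str(n);           // random A-Z, a-z, 0-9 string
    ~"TEST" + suffix
}
```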
@@ -407,7 +485,7 @@ pub fn seed() -> ~[u8] {
 }

 /// Create a random number generator with a system specified seed
-pub fn Rng() -> Rng {
+pub fn Rng() -> @Rng {
     seeded_rng(seed())
 }

@@ -449,7 +527,7 @@ impl Rng for XorShiftState {
     }
 }

-pub pure fn xorshift() -> Rng {
+pub pure fn xorshift() -> @Rng {
     // constants taken from http://en.wikipedia.org/wiki/Xorshift
     seeded_xorshift(123456789u32, 362436069u32, 521288629u32, 88675123u32)
 }
@@ -467,7 +545,7 @@ fn tls_rng_state(_v: @RandRes) {}
 * seeded by the system. Intended to be used in method chaining style, ie
 * task_rng().gen_int().
 */
-pub fn task_rng() -> Rng {
+pub fn task_rng() -> @Rng {
     let r : Option<@RandRes>;
     unsafe {
         r = task::local_data::local_data_get(tls_rng_state);
@@ -41,7 +41,7 @@ trait EscapedCharWriter {
     fn write_escaped_char(&self, ch: char);
 }

-impl EscapedCharWriter for Writer {
+impl EscapedCharWriter for @Writer {
     fn write_escaped_char(&self, ch: char) {
         match ch {
             '\t' => self.write_str("\\t"),
@@ -45,13 +45,13 @@ pub trait Program {
     fn get_id(&mut self) -> pid_t;

     /// Returns an io::writer that can be used to write to stdin
-    fn input(&mut self) -> io::Writer;
+    fn input(&mut self) -> @io::Writer;

     /// Returns an io::reader that can be used to read from stdout
-    fn output(&mut self) -> io::Reader;
+    fn output(&mut self) -> @io::Reader;

     /// Returns an io::reader that can be used to read from stderr
-    fn err(&mut self) -> io::Reader;
+    fn err(&mut self) -> @io::Reader;

     /// Closes the handle to the child processes standard input
     fn close_input(&mut self);
@@ -207,7 +207,7 @@ pub fn run_program(prog: &str, args: &[~str]) -> int {
 *
 * A class with a <program> field
 */
-pub fn start_program(prog: &str, args: &[~str]) -> Program {
+pub fn start_program(prog: &str, args: &[~str]) -> @Program {
     let pipe_input = os::pipe();
     let pipe_output = os::pipe();
     let pipe_err = os::pipe();
@@ -274,13 +274,13 @@ pub fn start_program(prog: &str, args: &[~str]) -> Program {

 impl Program for ProgRes {
     fn get_id(&mut self) -> pid_t { return self.r.pid; }
-    fn input(&mut self) -> io::Writer {
+    fn input(&mut self) -> @io::Writer {
         io::fd_writer(self.r.in_fd, false)
     }
-    fn output(&mut self) -> io::Reader {
+    fn output(&mut self) -> @io::Reader {
         io::FILE_reader(self.r.out_file, false)
     }
-    fn err(&mut self) -> io::Reader {
+    fn err(&mut self) -> @io::Reader {
         io::FILE_reader(self.r.err_file, false)
     }
     fn close_input(&mut self) { close_repr_input(&mut self.r); }
@@ -23,7 +23,7 @@ use super::rt::rust_task;
 pub trait LocalData { }
 impl<T:Durable> LocalData for @T { }

-impl Eq for LocalData {
+impl Eq for @LocalData {
     pure fn eq(&self, other: &@LocalData) -> bool {
         unsafe {
             let ptr_a: (uint, uint) = cast::reinterpret_cast(&(*self));
@@ -36,7 +36,7 @@ impl Eq for LocalData {

 // If TLS is used heavily in future, this could be made more efficient with a
 // proper map.
-type TaskLocalElement = (*libc::c_void, *libc::c_void, LocalData);
+type TaskLocalElement = (*libc::c_void, *libc::c_void, @LocalData);
 // Has to be a pointer at outermost layer; the foreign call returns void *.
 type TaskLocalMap = @mut ~[Option<TaskLocalElement>];

@@ -8,12 +8,13 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-use sys;
 use cast;
+use libc::{c_void, size_t};
+use rand::RngUtil;
+use rand;
+use sys;
 use task;
 use vec;
-use rand;
-use libc::{c_void, size_t};

 #[cfg(test)] use uint;

@ -202,10 +202,13 @@ pub fn replace_expr_in_crate(crate: ast::crate, i: uint,
|
||||||
newexpr: ast::expr, tm: test_mode) ->
|
newexpr: ast::expr, tm: test_mode) ->
|
||||||
ast::crate {
|
ast::crate {
|
||||||
let j: @mut uint = @mut 0u;
|
let j: @mut uint = @mut 0u;
|
||||||
fn fold_expr_rep(j_: @mut uint, i_: uint, newexpr_: ast::expr_,
|
fn fold_expr_rep(j_: @mut uint,
|
||||||
original: &ast::expr_, fld: fold::ast_fold,
|
i_: uint,
|
||||||
tm_: test_mode) ->
|
newexpr_: ast::expr_,
|
||||||
ast::expr_ {
|
original: &ast::expr_,
|
||||||
|
fld: @fold::ast_fold,
|
||||||
|
tm_: test_mode)
|
||||||
|
-> ast::expr_ {
|
||||||
*j_ += 1u;
|
*j_ += 1u;
|
||||||
if i_ + 1u == *j_ && safe_to_replace_expr(original, tm_) {
|
if i_ + 1u == *j_ && safe_to_replace_expr(original, tm_) {
|
||||||
newexpr_
|
newexpr_
|
||||||
|
@ -229,10 +232,13 @@ pub fn replace_expr_in_crate(crate: ast::crate, i: uint,
|
||||||
pub fn replace_ty_in_crate(crate: ast::crate, i: uint, newty: ast::Ty,
|
pub fn replace_ty_in_crate(crate: ast::crate, i: uint, newty: ast::Ty,
|
||||||
tm: test_mode) -> ast::crate {
|
tm: test_mode) -> ast::crate {
|
||||||
let j: @mut uint = @mut 0u;
|
let j: @mut uint = @mut 0u;
|
||||||
fn fold_ty_rep(j_: @mut uint, i_: uint, newty_: ast::ty_,
|
fn fold_ty_rep(j_: @mut uint,
|
||||||
original: &ast::ty_, fld: fold::ast_fold,
|
i_: uint,
|
||||||
tm_: test_mode) ->
|
newty_: ast::ty_,
|
||||||
ast::ty_ {
|
original: &ast::ty_,
|
||||||
|
fld: @fold::ast_fold,
|
||||||
|
tm_: test_mode)
|
||||||
|
-> ast::ty_ {
|
||||||
*j_ += 1u;
|
*j_ += 1u;
|
||||||
if i_ + 1u == *j_ && safe_to_replace_ty(original, tm_) {
|
if i_ + 1u == *j_ && safe_to_replace_ty(original, tm_) {
|
||||||
newty_
|
newty_
|
||||||
|
@ -252,7 +258,7 @@ pub fn under(n: uint, it: &fn(uint)) {
|
||||||
while i < n { it(i); i += 1u; }
|
while i < n { it(i); i += 1u; }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn as_str(f: @fn(+x: io::Writer)) -> ~str {
|
pub fn as_str(f: @fn(+x: @io::Writer)) -> ~str {
|
||||||
io::with_str_writer(f)
|
io::with_str_writer(f)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -304,7 +310,8 @@ pub fn check_variants_T<T: Copy>(
|
||||||
diagnostic::mk_span_handler(handler, codemap),
|
diagnostic::mk_span_handler(handler, codemap),
|
||||||
crate2,
|
crate2,
|
||||||
fname,
|
fname,
|
||||||
rdr, a,
|
rdr,
|
||||||
|
a,
|
||||||
pprust::no_ann(),
|
pprust::no_ann(),
|
||||||
false))
|
false))
|
||||||
};
|
};
|
||||||
|
|
|
@ -681,7 +681,7 @@ pub fn build_session(sopts: @session::options,
|
||||||
pub fn build_session_(sopts: @session::options,
|
pub fn build_session_(sopts: @session::options,
|
||||||
cm: @codemap::CodeMap,
|
cm: @codemap::CodeMap,
|
||||||
demitter: diagnostic::Emitter,
|
demitter: diagnostic::Emitter,
|
||||||
span_diagnostic_handler: diagnostic::span_handler)
|
span_diagnostic_handler: @diagnostic::span_handler)
|
||||||
-> Session {
|
-> Session {
|
||||||
let target_cfg = build_target_config(sopts, demitter);
|
let target_cfg = build_target_config(sopts, demitter);
|
||||||
let p_s = parse::new_parse_sess_special_handler(span_diagnostic_handler,
|
let p_s = parse::new_parse_sess_special_handler(span_diagnostic_handler,
|
||||||
|
@ -870,7 +870,7 @@ pub fn early_error(emitter: diagnostic::Emitter, msg: ~str) -> ! {
|
||||||
fail!();
|
fail!();
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn list_metadata(sess: Session, path: &Path, out: io::Writer) {
|
pub fn list_metadata(sess: Session, path: &Path, out: @io::Writer) {
|
||||||
metadata::loader::list_file_metadata(
|
metadata::loader::list_file_metadata(
|
||||||
sess.parse_sess.interner,
|
sess.parse_sess.interner,
|
||||||
session::sess_os_to_meta_os(sess.targ_cfg.os), path, out);
|
session::sess_os_to_meta_os(sess.targ_cfg.os), path, out);
|
||||||
|
|
|
@ -154,8 +154,8 @@ pub struct Session_ {
|
||||||
codemap: @codemap::CodeMap,
|
codemap: @codemap::CodeMap,
|
||||||
// For a library crate, this is always none
|
// For a library crate, this is always none
|
||||||
main_fn: @mut Option<(node_id, codemap::span)>,
|
main_fn: @mut Option<(node_id, codemap::span)>,
|
||||||
span_diagnostic: diagnostic::span_handler,
|
span_diagnostic: @diagnostic::span_handler,
|
||||||
filesearch: filesearch::FileSearch,
|
filesearch: @filesearch::FileSearch,
|
||||||
building_library: @mut bool,
|
building_library: @mut bool,
|
||||||
working_dir: Path,
|
working_dir: Path,
|
||||||
lint_settings: lint::LintSettings
|
lint_settings: lint::LintSettings
|
||||||
|
@ -227,7 +227,7 @@ pub impl Session {
|
||||||
fn next_node_id(&self) -> ast::node_id {
|
fn next_node_id(&self) -> ast::node_id {
|
||||||
return syntax::parse::next_node_id(self.parse_sess);
|
return syntax::parse::next_node_id(self.parse_sess);
|
||||||
}
|
}
|
||||||
fn diagnostic(&self) -> diagnostic::span_handler {
|
fn diagnostic(&self) -> @diagnostic::span_handler {
|
||||||
self.span_diagnostic
|
self.span_diagnostic
|
||||||
}
|
}
|
||||||
fn debugging_opt(&self, opt: uint) -> bool {
|
fn debugging_opt(&self, opt: uint) -> bool {
|
||||||
|
|
|
@ -63,7 +63,7 @@ fn filter_view_item(cx: @Context, &&view_item: @ast::view_item
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn fold_mod(cx: @Context, m: &ast::_mod, fld: fold::ast_fold) -> ast::_mod {
|
fn fold_mod(cx: @Context, m: &ast::_mod, fld: @fold::ast_fold) -> ast::_mod {
|
||||||
let filtered_items =
|
let filtered_items =
|
||||||
m.items.filter_mapped(|a| filter_item(cx, *a));
|
m.items.filter_mapped(|a| filter_item(cx, *a));
|
||||||
let filtered_view_items =
|
let filtered_view_items =
|
||||||
|
@ -84,7 +84,7 @@ fn filter_foreign_item(cx: @Context, &&item: @ast::foreign_item) ->
|
||||||
fn fold_foreign_mod(
|
fn fold_foreign_mod(
|
||||||
cx: @Context,
|
cx: @Context,
|
||||||
nm: &ast::foreign_mod,
|
nm: &ast::foreign_mod,
|
||||||
fld: fold::ast_fold
|
fld: @fold::ast_fold
|
||||||
) -> ast::foreign_mod {
|
) -> ast::foreign_mod {
|
||||||
let filtered_items =
|
let filtered_items =
|
||||||
nm.items.filter_mapped(|a| filter_foreign_item(cx, *a));
|
nm.items.filter_mapped(|a| filter_foreign_item(cx, *a));
|
||||||
|
@ -99,7 +99,7 @@ fn fold_foreign_mod(
|
||||||
}
|
}
|
||||||
|
|
||||||
fn fold_item_underscore(cx: @Context, item: &ast::item_,
|
fn fold_item_underscore(cx: @Context, item: &ast::item_,
|
||||||
fld: fold::ast_fold) -> ast::item_ {
|
fld: @fold::ast_fold) -> ast::item_ {
|
||||||
let item = match *item {
|
let item = match *item {
|
||||||
ast::item_impl(ref a, b, c, ref methods) => {
|
ast::item_impl(ref a, b, c, ref methods) => {
|
||||||
let methods = methods.filtered(|m| method_in_cfg(cx, *m) );
|
let methods = methods.filtered(|m| method_in_cfg(cx, *m) );
|
||||||
|
@ -135,7 +135,7 @@ fn filter_stmt(cx: @Context, &&stmt: @ast::stmt) ->
|
||||||
fn fold_block(
|
fn fold_block(
|
||||||
cx: @Context,
|
cx: @Context,
|
||||||
b: &ast::blk_,
|
b: &ast::blk_,
|
||||||
fld: fold::ast_fold
|
fld: @fold::ast_fold
|
||||||
) -> ast::blk_ {
|
) -> ast::blk_ {
|
||||||
let filtered_stmts =
|
let filtered_stmts =
|
||||||
b.stmts.filter_mapped(|a| filter_stmt(cx, *a));
|
b.stmts.filter_mapped(|a| filter_stmt(cx, *a));
|
||||||
|
|
|
@ -128,7 +128,7 @@ pub mod intrinsic {
|
||||||
#[abi = "rust-intrinsic"]
|
#[abi = "rust-intrinsic"]
|
||||||
pub extern {
|
pub extern {
|
||||||
pub fn get_tydesc<T>() -> *();
|
pub fn get_tydesc<T>() -> *();
|
||||||
pub fn visit_tydesc(++td: *TyDesc, &&tv: TyVisitor);
|
pub fn visit_tydesc(++td: *TyDesc, &&tv: @TyVisitor);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -39,7 +39,7 @@ struct TestCtxt {
|
||||||
sess: session::Session,
|
sess: session::Session,
|
||||||
crate: @ast::crate,
|
crate: @ast::crate,
|
||||||
path: ~[ast::ident],
|
path: ~[ast::ident],
|
||||||
ext_cx: ext_ctxt,
|
ext_cx: @ext_ctxt,
|
||||||
testfns: ~[Test]
|
testfns: ~[Test]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -102,7 +102,7 @@ fn strip_test_functions(crate: @ast::crate) -> @ast::crate {
|
||||||
|
|
||||||
fn fold_mod(cx: @mut TestCtxt,
|
fn fold_mod(cx: @mut TestCtxt,
|
||||||
m: &ast::_mod,
|
m: &ast::_mod,
|
||||||
fld: fold::ast_fold)
|
fld: @fold::ast_fold)
|
||||||
-> ast::_mod {
|
-> ast::_mod {
|
||||||
// Remove any #[main] from the AST so it doesn't clash with
|
// Remove any #[main] from the AST so it doesn't clash with
|
||||||
// the one we're going to add. Only if compiling an executable.
|
// the one we're going to add. Only if compiling an executable.
|
||||||
|
@ -125,7 +125,7 @@ fn fold_mod(cx: @mut TestCtxt,
|
||||||
|
|
||||||
fn fold_crate(cx: @mut TestCtxt,
|
fn fold_crate(cx: @mut TestCtxt,
|
||||||
c: &ast::crate_,
|
c: &ast::crate_,
|
||||||
fld: fold::ast_fold)
|
fld: @fold::ast_fold)
|
||||||
-> ast::crate_ {
|
-> ast::crate_ {
|
||||||
let folded = fold::noop_fold_crate(c, fld);
|
let folded = fold::noop_fold_crate(c, fld);
|
||||||
|
|
||||||
|
@ -138,7 +138,7 @@ fn fold_crate(cx: @mut TestCtxt,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
fn fold_item(cx: @mut TestCtxt, &&i: @ast::item, fld: fold::ast_fold)
|
fn fold_item(cx: @mut TestCtxt, &&i: @ast::item, fld: @fold::ast_fold)
|
||||||
-> Option<@ast::item> {
|
-> Option<@ast::item> {
|
||||||
cx.path.push(i.ident);
|
cx.path.push(i.ident);
|
||||||
debug!("current path: %s",
|
debug!("current path: %s",
|
||||||
|
|
|
@ -30,10 +30,10 @@ use std::oldmap::HashMap;
|
||||||
|
|
||||||
// Traverses an AST, reading all the information about use'd crates and extern
|
// Traverses an AST, reading all the information about use'd crates and extern
|
||||||
// libraries necessary for later resolving, typechecking, linking, etc.
|
// libraries necessary for later resolving, typechecking, linking, etc.
|
||||||
pub fn read_crates(diag: span_handler,
|
pub fn read_crates(diag: @span_handler,
|
||||||
crate: ast::crate,
|
crate: ast::crate,
|
||||||
cstore: @mut cstore::CStore,
|
cstore: @mut cstore::CStore,
|
||||||
filesearch: FileSearch,
|
filesearch: @FileSearch,
|
||||||
os: loader::os,
|
os: loader::os,
|
||||||
statik: bool,
|
statik: bool,
|
||||||
intr: @ident_interner) {
|
intr: @ident_interner) {
|
||||||
|
@ -75,7 +75,7 @@ fn dump_crates(crate_cache: @mut ~[cache_entry]) {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn warn_if_multiple_versions(e: @mut Env,
|
fn warn_if_multiple_versions(e: @mut Env,
|
||||||
diag: span_handler,
|
diag: @span_handler,
|
||||||
crate_cache: @mut ~[cache_entry]) {
|
crate_cache: @mut ~[cache_entry]) {
|
||||||
use core::either::*;
|
use core::either::*;
|
||||||
|
|
||||||
|
@ -115,8 +115,8 @@ fn warn_if_multiple_versions(e: @mut Env,
|
||||||
}
|
}
|
||||||
|
|
||||||
struct Env {
|
struct Env {
|
||||||
diag: span_handler,
|
diag: @span_handler,
|
||||||
filesearch: FileSearch,
|
filesearch: @FileSearch,
|
||||||
cstore: @mut cstore::CStore,
|
cstore: @mut cstore::CStore,
|
||||||
os: loader::os,
|
os: loader::os,
|
||||||
statik: bool,
|
statik: bool,
|
||||||
|
|
|
@ -1017,14 +1017,15 @@ fn get_attributes(md: ebml::Doc) -> ~[ast::attribute] {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn list_meta_items(intr: @ident_interner,
|
fn list_meta_items(intr: @ident_interner,
|
||||||
meta_items: ebml::Doc, out: io::Writer) {
|
meta_items: ebml::Doc,
|
||||||
|
out: @io::Writer) {
|
||||||
for get_meta_items(meta_items).each |mi| {
|
for get_meta_items(meta_items).each |mi| {
|
||||||
out.write_str(fmt!("%s\n", pprust::meta_item_to_str(*mi, intr)));
|
out.write_str(fmt!("%s\n", pprust::meta_item_to_str(*mi, intr)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn list_crate_attributes(intr: @ident_interner, md: ebml::Doc, hash: &str,
|
fn list_crate_attributes(intr: @ident_interner, md: ebml::Doc, hash: &str,
|
||||||
out: io::Writer) {
|
out: @io::Writer) {
|
||||||
out.write_str(fmt!("=Crate Attributes (%s)=\n", hash));
|
out.write_str(fmt!("=Crate Attributes (%s)=\n", hash));
|
||||||
|
|
||||||
for get_attributes(md).each |attr| {
|
for get_attributes(md).each |attr| {
|
||||||
|
@ -1063,7 +1064,7 @@ pub fn get_crate_deps(intr: @ident_interner, data: @~[u8]) -> ~[crate_dep] {
|
||||||
return deps;
|
return deps;
|
||||||
}
|
}
|
||||||
|
|
||||||
fn list_crate_deps(intr: @ident_interner, data: @~[u8], out: io::Writer) {
|
fn list_crate_deps(intr: @ident_interner, data: @~[u8], out: @io::Writer) {
|
||||||
out.write_str(~"=External Dependencies=\n");
|
out.write_str(~"=External Dependencies=\n");
|
||||||
|
|
||||||
for get_crate_deps(intr, data).each |dep| {
|
for get_crate_deps(intr, data).each |dep| {
|
||||||
|
@ -1106,7 +1107,7 @@ fn iter_crate_items(intr: @ident_interner, cdata: cmd,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn list_crate_metadata(intr: @ident_interner, bytes: @~[u8],
|
pub fn list_crate_metadata(intr: @ident_interner, bytes: @~[u8],
|
||||||
out: io::Writer) {
|
out: @io::Writer) {
|
||||||
let hash = get_crate_hash(bytes);
|
let hash = get_crate_hash(bytes);
|
||||||
let md = reader::Doc(bytes);
|
let md = reader::Doc(bytes);
|
||||||
list_crate_attributes(intr, md, *hash, out);
|
list_crate_attributes(intr, md, *hash, out);
|
||||||
|
|
|
@ -57,7 +57,7 @@ pub type encode_inlined_item = @fn(ecx: @EncodeContext,
|
||||||
ii: ast::inlined_item);
|
ii: ast::inlined_item);
|
||||||
|
|
||||||
pub struct EncodeParams {
|
pub struct EncodeParams {
|
||||||
diag: span_handler,
|
diag: @span_handler,
|
||||||
tcx: ty::ctxt,
|
tcx: ty::ctxt,
|
||||||
reachable: HashMap<ast::node_id, ()>,
|
reachable: HashMap<ast::node_id, ()>,
|
||||||
reexports2: middle::resolve::ExportMap2,
|
reexports2: middle::resolve::ExportMap2,
|
||||||
|
@ -83,7 +83,7 @@ struct Stats {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct EncodeContext {
|
pub struct EncodeContext {
|
||||||
diag: span_handler,
|
diag: @span_handler,
|
||||||
tcx: ty::ctxt,
|
tcx: ty::ctxt,
|
||||||
stats: @mut Stats,
|
stats: @mut Stats,
|
||||||
reachable: HashMap<ast::node_id, ()>,
|
reachable: HashMap<ast::node_id, ()>,
|
||||||
|
@ -1054,7 +1054,7 @@ fn create_index<T:Copy + Hash + IterBytes>(index: ~[entry<T>]) ->
|
||||||
}
|
}
|
||||||
|
|
||||||
fn encode_index<T>(ebml_w: writer::Encoder, buckets: ~[@~[entry<T>]],
|
fn encode_index<T>(ebml_w: writer::Encoder, buckets: ~[@~[entry<T>]],
|
||||||
write_fn: &fn(io::Writer, T)) {
|
write_fn: &fn(@io::Writer, T)) {
|
||||||
let writer = ebml_w.writer;
|
let writer = ebml_w.writer;
|
||||||
ebml_w.start_tag(tag_index);
|
ebml_w.start_tag(tag_index);
|
||||||
let mut bucket_locs: ~[uint] = ~[];
|
let mut bucket_locs: ~[uint] = ~[];
|
||||||
|
@ -1081,9 +1081,9 @@ fn encode_index<T>(ebml_w: writer::Encoder, buckets: ~[@~[entry<T>]],
|
||||||
ebml_w.end_tag();
|
ebml_w.end_tag();
|
||||||
}
|
}
|
||||||
|
|
||||||
fn write_str(writer: io::Writer, &&s: ~str) { writer.write_str(s); }
|
fn write_str(writer: @io::Writer, &&s: ~str) { writer.write_str(s); }
|
||||||
|
|
||||||
fn write_int(writer: io::Writer, &&n: int) {
|
fn write_int(writer: @io::Writer, &&n: int) {
|
||||||
fail_unless!(n < 0x7fff_ffff);
|
fail_unless!(n < 0x7fff_ffff);
|
||||||
writer.write_be_u32(n as u32);
|
writer.write_be_u32(n as u32);
|
||||||
}
|
}
|
||||||
|
@ -1326,7 +1326,7 @@ pub fn encode_metadata(parms: EncodeParams, crate: &crate) -> ~[u8] {
|
||||||
type_abbrevs: ty::new_ty_hash()
|
type_abbrevs: ty::new_ty_hash()
|
||||||
};
|
};
|
||||||
|
|
||||||
let ebml_w = writer::Encoder(wr as io::Writer);
|
let ebml_w = writer::Encoder(wr as @io::Writer);
|
||||||
|
|
||||||
encode_hash(ebml_w, ecx.link_meta.extras_hash);
|
encode_hash(ebml_w, ecx.link_meta.extras_hash);
|
||||||
|
|
||||||
|
|
|
@ -37,7 +37,8 @@ pub trait FileSearch {
|
||||||
|
|
||||||
pub fn mk_filesearch(maybe_sysroot: Option<Path>,
|
pub fn mk_filesearch(maybe_sysroot: Option<Path>,
|
||||||
target_triple: &str,
|
target_triple: &str,
|
||||||
+addl_lib_search_paths: ~[Path]) -> FileSearch {
|
+addl_lib_search_paths: ~[Path])
|
||||||
|
-> @FileSearch {
|
||||||
struct FileSearchImpl {
|
struct FileSearchImpl {
|
||||||
sysroot: Path,
|
sysroot: Path,
|
||||||
addl_lib_search_paths: ~[Path],
|
addl_lib_search_paths: ~[Path],
|
||||||
|
@ -78,7 +79,7 @@ pub fn mk_filesearch(maybe_sysroot: Option<Path>,
|
||||||
} as @FileSearch
|
} as @FileSearch
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn search<T:Copy>(filesearch: FileSearch, pick: pick<T>) -> Option<T> {
|
pub fn search<T:Copy>(filesearch: @FileSearch, pick: pick<T>) -> Option<T> {
|
||||||
let mut rslt = None;
|
let mut rslt = None;
|
||||||
for filesearch.lib_search_paths().each |lib_search_path| {
|
for filesearch.lib_search_paths().each |lib_search_path| {
|
||||||
debug!("searching %s", lib_search_path.to_str());
|
debug!("searching %s", lib_search_path.to_str());
|
||||||
|
|
|
@ -44,8 +44,8 @@ pub enum os {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct Context {
|
pub struct Context {
|
||||||
diag: span_handler,
|
diag: @span_handler,
|
||||||
filesearch: FileSearch,
|
filesearch: @FileSearch,
|
||||||
span: span,
|
span: span,
|
||||||
ident: ast::ident,
|
ident: ast::ident,
|
||||||
metas: ~[@ast::meta_item],
|
metas: ~[@ast::meta_item],
|
||||||
|
@ -87,7 +87,7 @@ fn libname(cx: Context) -> (~str, ~str) {
|
||||||
fn find_library_crate_aux(
|
fn find_library_crate_aux(
|
||||||
cx: Context,
|
cx: Context,
|
||||||
(prefix, suffix): (~str, ~str),
|
(prefix, suffix): (~str, ~str),
|
||||||
filesearch: filesearch::FileSearch
|
filesearch: @filesearch::FileSearch
|
||||||
) -> Option<(~str, @~[u8])> {
|
) -> Option<(~str, @~[u8])> {
|
||||||
let crate_name = crate_name_from_metas(cx.metas);
|
let crate_name = crate_name_from_metas(cx.metas);
|
||||||
let prefix: ~str = prefix + *crate_name + ~"-";
|
let prefix: ~str = prefix + *crate_name + ~"-";
|
||||||
|
@ -156,7 +156,8 @@ pub fn crate_name_from_metas(metas: &[@ast::meta_item]) -> @~str {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn note_linkage_attrs(intr: @ident_interner, diag: span_handler,
|
pub fn note_linkage_attrs(intr: @ident_interner,
|
||||||
|
diag: @span_handler,
|
||||||
attrs: ~[ast::attribute]) {
|
attrs: ~[ast::attribute]) {
|
||||||
for attr::find_linkage_metas(attrs).each |mi| {
|
for attr::find_linkage_metas(attrs).each |mi| {
|
||||||
diag.handler().note(fmt!("meta: %s",
|
diag.handler().note(fmt!("meta: %s",
|
||||||
|
@ -252,7 +253,9 @@ pub fn meta_section_name(os: os) -> ~str {
|
||||||
|
|
||||||
// A diagnostic function for dumping crate metadata to an output stream
|
// A diagnostic function for dumping crate metadata to an output stream
|
||||||
pub fn list_file_metadata(intr: @ident_interner,
|
pub fn list_file_metadata(intr: @ident_interner,
|
||||||
os: os, path: &Path, out: io::Writer) {
|
os: os,
|
||||||
|
path: &Path,
|
||||||
|
out: @io::Writer) {
|
||||||
match get_metadata_section(os, path) {
|
match get_metadata_section(os, path) {
|
||||||
option::Some(bytes) => decoder::list_crate_metadata(intr, bytes, out),
|
option::Some(bytes) => decoder::list_crate_metadata(intr, bytes, out),
|
||||||
option::None => {
|
option::None => {
|
||||||
|
|
|
@ -27,7 +27,7 @@ use syntax::print::pprust::*;
|
||||||
use middle::ty::Vid;
|
use middle::ty::Vid;
|
||||||
|
|
||||||
pub struct ctxt {
|
pub struct ctxt {
|
||||||
diag: span_handler,
|
diag: @span_handler,
|
||||||
// Def -> str Callback:
|
// Def -> str Callback:
|
||||||
ds: @fn(def_id) -> ~str,
|
ds: @fn(def_id) -> ~str,
|
||||||
// The type context.
|
// The type context.
|
||||||
|
@ -57,7 +57,7 @@ fn cx_uses_abbrevs(cx: @ctxt) -> bool {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn enc_ty(w: io::Writer, cx: @ctxt, t: ty::t) {
|
pub fn enc_ty(w: @io::Writer, cx: @ctxt, t: ty::t) {
|
||||||
match cx.abbrevs {
|
match cx.abbrevs {
|
||||||
ac_no_abbrevs => {
|
ac_no_abbrevs => {
|
||||||
let result_str = match cx.tcx.short_names_cache.find(&t) {
|
let result_str = match cx.tcx.short_names_cache.find(&t) {
|
||||||
|
@ -113,7 +113,7 @@ pub fn enc_ty(w: io::Writer, cx: @ctxt, t: ty::t) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
fn enc_mt(w: io::Writer, cx: @ctxt, mt: ty::mt) {
|
fn enc_mt(w: @io::Writer, cx: @ctxt, mt: ty::mt) {
|
||||||
match mt.mutbl {
|
match mt.mutbl {
|
||||||
m_imm => (),
|
m_imm => (),
|
||||||
m_mutbl => w.write_char('m'),
|
m_mutbl => w.write_char('m'),
|
||||||
|
@ -122,7 +122,7 @@ fn enc_mt(w: io::Writer, cx: @ctxt, mt: ty::mt) {
|
||||||
enc_ty(w, cx, mt.ty);
|
enc_ty(w, cx, mt.ty);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn enc_opt<T>(w: io::Writer, t: Option<T>, enc_f: &fn(T)) {
|
fn enc_opt<T>(w: @io::Writer, t: Option<T>, enc_f: &fn(T)) {
|
||||||
match &t {
|
match &t {
|
||||||
&None => w.write_char('n'),
|
&None => w.write_char('n'),
|
||||||
&Some(ref v) => {
|
&Some(ref v) => {
|
||||||
|
@ -132,7 +132,7 @@ fn enc_opt<T>(w: io::Writer, t: Option<T>, enc_f: &fn(T)) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn enc_substs(w: io::Writer, cx: @ctxt, substs: ty::substs) {
|
fn enc_substs(w: @io::Writer, cx: @ctxt, substs: ty::substs) {
|
||||||
do enc_opt(w, substs.self_r) |r| { enc_region(w, cx, r) }
|
do enc_opt(w, substs.self_r) |r| { enc_region(w, cx, r) }
|
||||||
do enc_opt(w, substs.self_ty) |t| { enc_ty(w, cx, t) }
|
do enc_opt(w, substs.self_ty) |t| { enc_ty(w, cx, t) }
|
||||||
w.write_char('[');
|
w.write_char('[');
|
||||||
|
@ -140,7 +140,7 @@ fn enc_substs(w: io::Writer, cx: @ctxt, substs: ty::substs) {
|
||||||
w.write_char(']');
|
w.write_char(']');
|
||||||
}
|
}
|
||||||
|
|
||||||
fn enc_region(w: io::Writer, cx: @ctxt, r: ty::Region) {
|
fn enc_region(w: @io::Writer, cx: @ctxt, r: ty::Region) {
|
||||||
match r {
|
match r {
|
||||||
ty::re_bound(br) => {
|
ty::re_bound(br) => {
|
||||||
w.write_char('b');
|
w.write_char('b');
|
||||||
|
@ -169,7 +169,7 @@ fn enc_region(w: io::Writer, cx: @ctxt, r: ty::Region) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn enc_bound_region(w: io::Writer, cx: @ctxt, br: ty::bound_region) {
|
fn enc_bound_region(w: @io::Writer, cx: @ctxt, br: ty::bound_region) {
|
||||||
match br {
|
match br {
|
||||||
ty::br_self => w.write_char('s'),
|
ty::br_self => w.write_char('s'),
|
||||||
ty::br_anon(idx) => {
|
ty::br_anon(idx) => {
|
||||||
|
@ -194,7 +194,7 @@ fn enc_bound_region(w: io::Writer, cx: @ctxt, br: ty::bound_region) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn enc_vstore(w: io::Writer, cx: @ctxt, v: ty::vstore) {
|
pub fn enc_vstore(w: @io::Writer, cx: @ctxt, v: ty::vstore) {
|
||||||
w.write_char('/');
|
w.write_char('/');
|
||||||
match v {
|
match v {
|
||||||
ty::vstore_fixed(u) => {
|
ty::vstore_fixed(u) => {
|
||||||
|
@ -214,7 +214,7 @@ pub fn enc_vstore(w: io::Writer, cx: @ctxt, v: ty::vstore) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn enc_trait_store(w: io::Writer, cx: @ctxt, s: ty::TraitStore) {
|
pub fn enc_trait_store(w: @io::Writer, cx: @ctxt, s: ty::TraitStore) {
|
||||||
match s {
|
match s {
|
||||||
ty::UniqTraitStore => w.write_char('~'),
|
ty::UniqTraitStore => w.write_char('~'),
|
||||||
ty::BoxTraitStore => w.write_char('@'),
|
ty::BoxTraitStore => w.write_char('@'),
|
||||||
|
@ -226,7 +226,7 @@ pub fn enc_trait_store(w: io::Writer, cx: @ctxt, s: ty::TraitStore) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn enc_sty(w: io::Writer, cx: @ctxt, +st: ty::sty) {
|
fn enc_sty(w: @io::Writer, cx: @ctxt, +st: ty::sty) {
|
||||||
match st {
|
match st {
|
||||||
ty::ty_nil => w.write_char('n'),
|
ty::ty_nil => w.write_char('n'),
|
||||||
ty::ty_bot => w.write_char('z'),
|
ty::ty_bot => w.write_char('z'),
|
||||||
|
@ -337,7 +337,7 @@ fn enc_sty(w: io::Writer, cx: @ctxt, +st: ty::sty) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn enc_sigil(w: io::Writer, sigil: Sigil) {
|
fn enc_sigil(w: @io::Writer, sigil: Sigil) {
|
||||||
match sigil {
|
match sigil {
|
||||||
ManagedSigil => w.write_str("@"),
|
ManagedSigil => w.write_str("@"),
|
||||||
OwnedSigil => w.write_str("~"),
|
OwnedSigil => w.write_str("~"),
|
||||||
|
@ -345,19 +345,19 @@ fn enc_sigil(w: io::Writer, sigil: Sigil) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn enc_arg(w: io::Writer, cx: @ctxt, arg: ty::arg) {
|
pub fn enc_arg(w: @io::Writer, cx: @ctxt, arg: ty::arg) {
|
||||||
enc_mode(w, cx, arg.mode);
|
enc_mode(w, cx, arg.mode);
|
||||||
enc_ty(w, cx, arg.ty);
|
enc_ty(w, cx, arg.ty);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn enc_mode(w: io::Writer, cx: @ctxt, m: mode) {
|
pub fn enc_mode(w: @io::Writer, cx: @ctxt, m: mode) {
|
||||||
match ty::resolved_mode(cx.tcx, m) {
|
match ty::resolved_mode(cx.tcx, m) {
|
||||||
by_copy => w.write_char('+'),
|
by_copy => w.write_char('+'),
|
||||||
by_ref => w.write_char('='),
|
by_ref => w.write_char('='),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn enc_purity(w: io::Writer, p: purity) {
|
fn enc_purity(w: @io::Writer, p: purity) {
|
||||||
match p {
|
match p {
|
||||||
pure_fn => w.write_char('p'),
|
pure_fn => w.write_char('p'),
|
||||||
impure_fn => w.write_char('i'),
|
impure_fn => w.write_char('i'),
|
||||||
|
@ -366,26 +366,26 @@ fn enc_purity(w: io::Writer, p: purity) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn enc_abi(w: io::Writer, a: Abi) {
|
fn enc_abi(w: @io::Writer, a: Abi) {
|
||||||
match a {
|
match a {
|
||||||
RustAbi => w.write_char('r'),
|
RustAbi => w.write_char('r'),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn enc_onceness(w: io::Writer, o: Onceness) {
|
fn enc_onceness(w: @io::Writer, o: Onceness) {
|
||||||
match o {
|
match o {
|
||||||
Once => w.write_char('o'),
|
Once => w.write_char('o'),
|
||||||
Many => w.write_char('m')
|
Many => w.write_char('m')
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn enc_bare_fn_ty(w: io::Writer, cx: @ctxt, ft: &ty::BareFnTy) {
|
fn enc_bare_fn_ty(w: @io::Writer, cx: @ctxt, ft: &ty::BareFnTy) {
|
||||||
enc_purity(w, ft.purity);
|
enc_purity(w, ft.purity);
|
||||||
enc_abi(w, ft.abi);
|
enc_abi(w, ft.abi);
|
||||||
enc_fn_sig(w, cx, &ft.sig);
|
enc_fn_sig(w, cx, &ft.sig);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn enc_closure_ty(w: io::Writer, cx: @ctxt, ft: &ty::ClosureTy) {
|
fn enc_closure_ty(w: @io::Writer, cx: @ctxt, ft: &ty::ClosureTy) {
|
||||||
enc_sigil(w, ft.sigil);
|
enc_sigil(w, ft.sigil);
|
||||||
enc_purity(w, ft.purity);
|
enc_purity(w, ft.purity);
|
||||||
enc_onceness(w, ft.onceness);
|
enc_onceness(w, ft.onceness);
|
||||||
|
@ -393,7 +393,7 @@ fn enc_closure_ty(w: io::Writer, cx: @ctxt, ft: &ty::ClosureTy) {
|
||||||
enc_fn_sig(w, cx, &ft.sig);
|
enc_fn_sig(w, cx, &ft.sig);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn enc_fn_sig(w: io::Writer, cx: @ctxt, fsig: &ty::FnSig) {
|
fn enc_fn_sig(w: @io::Writer, cx: @ctxt, fsig: &ty::FnSig) {
|
||||||
w.write_char('[');
|
w.write_char('[');
|
||||||
for fsig.inputs.each |arg| {
|
for fsig.inputs.each |arg| {
|
||||||
enc_arg(w, cx, *arg);
|
enc_arg(w, cx, *arg);
|
||||||
|
@ -402,7 +402,7 @@ fn enc_fn_sig(w: io::Writer, cx: @ctxt, fsig: &ty::FnSig) {
|
||||||
enc_ty(w, cx, fsig.output);
|
enc_ty(w, cx, fsig.output);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn enc_bounds(w: io::Writer, cx: @ctxt, bs: @~[ty::param_bound]) {
|
pub fn enc_bounds(w: @io::Writer, cx: @ctxt, bs: @~[ty::param_bound]) {
|
||||||
for vec::each(*bs) |bound| {
|
for vec::each(*bs) |bound| {
|
||||||
match *bound {
|
match *bound {
|
||||||
ty::bound_owned => w.write_char('S'),
|
ty::bound_owned => w.write_char('S'),
|
||||||
|
|
|
@ -292,7 +292,7 @@ fn encode_ast(ebml_w: writer::Encoder, item: ast::inlined_item) {
|
||||||
// nested items, as otherwise it would get confused when translating
|
// nested items, as otherwise it would get confused when translating
|
||||||
// inlined items.
|
// inlined items.
|
||||||
fn simplify_ast(ii: ast::inlined_item) -> ast::inlined_item {
|
fn simplify_ast(ii: ast::inlined_item) -> ast::inlined_item {
|
||||||
fn drop_nested_items(blk: &ast::blk_, fld: fold::ast_fold) -> ast::blk_ {
|
fn drop_nested_items(blk: &ast::blk_, fld: @fold::ast_fold) -> ast::blk_ {
|
||||||
let stmts_sans_items = do blk.stmts.filtered |stmt| {
|
let stmts_sans_items = do blk.stmts.filtered |stmt| {
|
||||||
match stmt.node {
|
match stmt.node {
|
||||||
ast::stmt_expr(_, _) | ast::stmt_semi(_, _) |
|
ast::stmt_expr(_, _) | ast::stmt_semi(_, _) |
|
||||||
|
|
|
@ -825,7 +825,8 @@ pub impl Liveness {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn write_vars(&self, wr: io::Writer,
|
fn write_vars(&self,
|
||||||
|
wr: @io::Writer,
|
||||||
ln: LiveNode,
|
ln: LiveNode,
|
||||||
test: &fn(uint) -> LiveNode) {
|
test: &fn(uint) -> LiveNode) {
|
||||||
let node_base_idx = self.idx(ln, Variable(0));
|
let node_base_idx = self.idx(ln, Variable(0));
|
||||||
|
|
|
@ -406,6 +406,6 @@ impl ABIInfo for X86_64_ABIInfo {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn x86_64_abi_info() -> ABIInfo {
|
pub fn x86_64_abi_info() -> @ABIInfo {
|
||||||
return @X86_64_ABIInfo as @ABIInfo;
|
return @X86_64_ABIInfo as @ABIInfo;
|
||||||
}
|
}
|
||||||
|
|
|
@ -1035,7 +1035,7 @@ pub fn T_captured_tydescs(cx: @CrateContext, n: uint) -> TypeRef {
|
||||||
|
|
||||||
pub fn T_opaque_trait(cx: @CrateContext, store: ty::TraitStore) -> TypeRef {
|
pub fn T_opaque_trait(cx: @CrateContext, store: ty::TraitStore) -> TypeRef {
|
||||||
match store {
|
match store {
|
||||||
ty::BoxTraitStore | ty::BareTraitStore => {
|
ty::BoxTraitStore => {
|
||||||
T_struct(~[T_ptr(cx.tydesc_type), T_opaque_box_ptr(cx)])
|
T_struct(~[T_ptr(cx.tydesc_type), T_opaque_box_ptr(cx)])
|
||||||
}
|
}
|
||||||
ty::UniqTraitStore => {
|
ty::UniqTraitStore => {
|
||||||
|
@ -1046,6 +1046,9 @@ pub fn T_opaque_trait(cx: @CrateContext, store: ty::TraitStore) -> TypeRef {
|
||||||
ty::RegionTraitStore(_) => {
|
ty::RegionTraitStore(_) => {
|
||||||
T_struct(~[T_ptr(cx.tydesc_type), T_ptr(T_i8())])
|
T_struct(~[T_ptr(cx.tydesc_type), T_ptr(T_i8())])
|
||||||
}
|
}
|
||||||
|
ty::BareTraitStore => {
|
||||||
|
cx.sess.bug(~"can't make T_opaque_trait with bare trait store")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -42,7 +42,7 @@ use syntax::{ast, ast_util};
|
||||||
use syntax::{attr, ast_map};
|
use syntax::{attr, ast_map};
|
||||||
use syntax::parse::token::special_idents;
|
use syntax::parse::token::special_idents;
|
||||||
|
|
||||||
fn abi_info(arch: session::arch) -> cabi::ABIInfo {
|
fn abi_info(arch: session::arch) -> @cabi::ABIInfo {
|
||||||
return match arch {
|
return match arch {
|
||||||
arch_x86_64 => x86_64_abi_info(),
|
arch_x86_64 => x86_64_abi_info(),
|
||||||
arch_arm => cabi_arm::abi_info(),
|
arch_arm => cabi_arm::abi_info(),
|
||||||
|
|
|
@ -244,7 +244,7 @@ pub struct InstantiatedTraitRef {
|
||||||
pub type ctxt = @ctxt_;
|
pub type ctxt = @ctxt_;
|
||||||
|
|
||||||
struct ctxt_ {
|
struct ctxt_ {
|
||||||
diag: syntax::diagnostic::span_handler,
|
diag: @syntax::diagnostic::span_handler,
|
||||||
interner: HashMap<intern_key, t_box>,
|
interner: HashMap<intern_key, t_box>,
|
||||||
next_id: @mut uint,
|
next_id: @mut uint,
|
||||||
vecs_implicitly_copyable: bool,
|
vecs_implicitly_copyable: bool,
|
||||||
|
@@ -3600,11 +3600,7 @@ pub fn impl_traits(cx: ctxt, id: ast::def_id, store: TraitStore) -> ~[t] {
 fn storeify(cx: ctxt, ty: t, store: TraitStore) -> t {
     match ty::get(ty).sty {
         ty::ty_trait(did, ref substs, trait_store) => {
-            if store == trait_store ||
-                (store == BareTraitStore &&
-                 trait_store == BoxTraitStore) ||
-                (store == BoxTraitStore &&
-                 trait_store == BareTraitStore) {
+            if store == trait_store {
                 ty
             } else {
                 mk_trait(cx, did, (/*bad*/copy *substs), store)
@ -218,7 +218,7 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:region_scope + Copy + Durable>(
|
||||||
|
|
||||||
// Handle @, ~, and & being able to mean estrs and evecs.
|
// Handle @, ~, and & being able to mean estrs and evecs.
|
||||||
// If a_seq_ty is a str or a vec, make it an estr/evec.
|
// If a_seq_ty is a str or a vec, make it an estr/evec.
|
||||||
// Also handle function sigils and first-class trait types.
|
// Also handle first-class trait types.
|
||||||
fn mk_pointer<AC:AstConv,RS:region_scope + Copy + Durable>(
|
fn mk_pointer<AC:AstConv,RS:region_scope + Copy + Durable>(
|
||||||
self: &AC,
|
self: &AC,
|
||||||
rscope: &RS,
|
rscope: &RS,
|
||||||
|
|
|
@ -3204,6 +3204,19 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
|
||||||
fail_unless!(ccx.tcx.intrinsic_defs.contains_key(&ty_visitor_name));
|
fail_unless!(ccx.tcx.intrinsic_defs.contains_key(&ty_visitor_name));
|
||||||
let (_, tydesc_ty) = tcx.intrinsic_defs.get(&tydesc_name);
|
let (_, tydesc_ty) = tcx.intrinsic_defs.get(&tydesc_name);
|
||||||
let (_, visitor_trait) = tcx.intrinsic_defs.get(&ty_visitor_name);
|
let (_, visitor_trait) = tcx.intrinsic_defs.get(&ty_visitor_name);
|
||||||
|
|
||||||
|
let visitor_trait = match ty::get(visitor_trait).sty {
|
||||||
|
ty::ty_trait(trait_def_id, ref trait_substs, _) => {
|
||||||
|
ty::mk_trait(tcx,
|
||||||
|
trait_def_id,
|
||||||
|
copy *trait_substs,
|
||||||
|
ty::BoxTraitStore)
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
tcx.sess.span_bug(it.span, ~"TyVisitor wasn't a trait?!")
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt {ty: tydesc_ty,
|
let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt {ty: tydesc_ty,
|
||||||
mutbl: ast::m_imm});
|
mutbl: ast::m_imm});
|
||||||
(0u, ~[arg(ast::by_copy, td_ptr),
|
(0u, ~[arg(ast::by_copy, td_ptr),
|
||||||
|
|
|
@ -198,12 +198,19 @@ pub fn lookup_vtable(vcx: &VtableContext,
|
||||||
vcx.infcx.ty_to_str(ity));
|
vcx.infcx.ty_to_str(ity));
|
||||||
|
|
||||||
match ty::get(ity).sty {
|
match ty::get(ity).sty {
|
||||||
ty::ty_trait(idid, _, _) => {
|
ty::ty_trait(idid, ref isubsts, _) => {
|
||||||
if trait_id == idid {
|
if trait_id == idid {
|
||||||
debug!("(checking vtable) @0 \
|
debug!("(checking vtable) @0 \
|
||||||
relating ty to trait \
|
relating ty to trait \
|
||||||
ty with did %?",
|
ty with did %?",
|
||||||
idid);
|
idid);
|
||||||
|
|
||||||
|
// Convert `ity` so that it has the right vstore.
|
||||||
|
let ity = ty::mk_trait(vcx.tcx(),
|
||||||
|
idid,
|
||||||
|
copy *isubsts,
|
||||||
|
trait_store);
|
||||||
|
|
||||||
relate_trait_tys(vcx, location_info,
|
relate_trait_tys(vcx, location_info,
|
||||||
trait_ty, ity);
|
trait_ty, ity);
|
||||||
let vtable = vtable_param(n, n_bound);
|
let vtable = vtable_param(n, n_bound);
|
||||||
|
@ -570,7 +577,6 @@ pub fn early_resolve_expr(ex: @ast::expr,
|
||||||
match (&ty::get(ty).sty, store) {
|
match (&ty::get(ty).sty, store) {
|
||||||
(&ty::ty_box(mt), ty::BoxTraitStore) |
|
(&ty::ty_box(mt), ty::BoxTraitStore) |
|
||||||
// XXX: Bare trait store is deprecated.
|
// XXX: Bare trait store is deprecated.
|
||||||
(&ty::ty_box(mt), ty::BareTraitStore) |
|
|
||||||
(&ty::ty_uniq(mt), ty::UniqTraitStore) |
|
(&ty::ty_uniq(mt), ty::UniqTraitStore) |
|
||||||
(&ty::ty_rptr(_, mt), ty::RegionTraitStore(*)) => {
|
(&ty::ty_rptr(_, mt), ty::RegionTraitStore(*)) => {
|
||||||
let location_info =
|
let location_info =
|
||||||
|
@ -622,8 +628,14 @@ pub fn early_resolve_expr(ex: @ast::expr,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// XXX: Remove bare below.
|
(_, ty::BareTraitStore) => {
|
||||||
(_, ty::BoxTraitStore) | (_, ty::BareTraitStore) => {
|
fcx.ccx.tcx.sess.span_err(
|
||||||
|
ex.span,
|
||||||
|
~"a sigil (`@`, `~`, or `&`) must be specified \
|
||||||
|
when casting to a trait");
|
||||||
|
}
|
||||||
|
|
||||||
|
(_, ty::BoxTraitStore) => {
|
||||||
fcx.ccx.tcx.sess.span_err(
|
fcx.ccx.tcx.sess.span_err(
|
||||||
ex.span,
|
ex.span,
|
||||||
fmt!("can only cast an @-pointer \
|
fmt!("can only cast an @-pointer \
|
||||||
|
|
|
@ -369,12 +369,6 @@ pub fn super_trait_stores<C:Combine>(self: &C,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// XXX: This should go away soon.
|
|
||||||
(ty::BareTraitStore, ty::BoxTraitStore) |
|
|
||||||
(ty::BoxTraitStore, ty::BareTraitStore) => {
|
|
||||||
Ok(ty::BoxTraitStore)
|
|
||||||
}
|
|
||||||
|
|
||||||
_ if a == b => {
|
_ if a == b => {
|
||||||
Ok(a)
|
Ok(a)
|
||||||
}
|
}
|
||||||
|
|
|
@@ -125,7 +125,8 @@ pub struct binding_rscope {

 pub fn in_binding_rscope<RS:region_scope + Copy + Durable>(self: &RS)
 -> binding_rscope {
-let base = @(copy *self) as @region_scope;
+let base = @copy *self;
+let base = base as @region_scope;
 binding_rscope { base: base, anon_bindings: @mut 0 }
 }
 impl region_scope for binding_rscope {
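The hunk above shows a workaround this commit uses in a few places: allocate the managed box first, then cast it to the sigiled trait type in a separate statement. A hedged sketch of the same pattern with invented names, in the same era's dialect:

~~~~
// Illustrative only; `Counter` and `Simple` are not part of the patch.
trait Counter { fn count(&self) -> uint; }
struct Simple { n: uint }
impl Counter for Simple { fn count(&self) -> uint { self.n } }

fn make_counter(n: uint) -> @Counter {
    let c = @Simple { n: n };   // build the concrete @-box first
    c as @Counter               // then cast it to the boxed trait type
}
~~~~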
@@ -59,7 +59,7 @@ enum CmdAction {

 /// A utility function that hands off a pretty printer to a callback.
 fn with_pp(intr: @token::ident_interner,
-cb: &fn(@pprust::ps, io::Writer)) -> ~str {
+cb: &fn(@pprust::ps, @io::Writer)) -> ~str {
 do io::with_str_writer |writer| {
 let pp = pprust::rust_printer(writer, intr);

@@ -257,7 +257,7 @@ fn get_line(prompt: ~str) -> Option<~str> {
 }

 /// Run a command, e.g. :clear, :exit, etc.
-fn run_cmd(repl: &mut Repl, _in: io::Reader, _out: io::Writer,
+fn run_cmd(repl: &mut Repl, _in: @io::Reader, _out: @io::Writer,
 cmd: ~str, args: ~[~str]) -> CmdAction {
 let mut action = action_none;
 match cmd {

@@ -334,7 +334,7 @@ fn run_cmd(repl: &mut Repl, _in: io::Reader, _out: io::Writer,

 /// Executes a line of input, which may either be rust code or a
 /// :command. Returns a new Repl if it has changed.
-fn run_line(repl: &mut Repl, in: io::Reader, out: io::Writer, line: ~str)
+fn run_line(repl: &mut Repl, in: @io::Reader, out: @io::Writer, line: ~str)
 -> Option<Repl> {
 if line.starts_with(~":") {
 let full = line.substr(1, line.len() - 1);
@@ -70,13 +70,14 @@ struct ListenerFn {
 struct ReadyCtx {
 sess: session::Session,
 crate: @ast::crate,
-ext_cx: ext_ctxt,
+ext_cx: @ext_ctxt,
 path: ~[ast::ident],
 fns: ~[ListenerFn]
 }

-fn fold_mod(_ctx: @mut ReadyCtx, m: &ast::_mod,
-fold: fold::ast_fold) -> ast::_mod {
+fn fold_mod(_ctx: @mut ReadyCtx,
+m: &ast::_mod,
+fold: @fold::ast_fold) -> ast::_mod {
 fn strip_main(item: @ast::item) -> @ast::item {
 @ast::item {
 attrs: do item.attrs.filtered |attr| {

@@ -94,9 +95,9 @@ fn fold_mod(_ctx: @mut ReadyCtx, m: &ast::_mod,
 }, fold)
 }

-fn fold_item(ctx: @mut ReadyCtx, item: @ast::item,
-fold: fold::ast_fold) -> Option<@ast::item> {
+fn fold_item(ctx: @mut ReadyCtx,
+item: @ast::item,
+fold: @fold::ast_fold) -> Option<@ast::item> {
 ctx.path.push(item.ident);

 let attrs = attr::find_attrs_by_name(item.attrs, ~"pkg_do");
@@ -1424,7 +1424,7 @@ mod tests {
 fail_unless!(a.capacity() == uint::bits);
 }

-fn rng() -> rand::Rng {
+fn rng() -> @rand::Rng {
 let seed = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0];
 rand::seeded_rng(seed)
 }
@@ -415,11 +415,11 @@ pub mod writer {

 // ebml writing
 pub struct Encoder {
-writer: io::Writer,
+writer: @io::Writer,
 priv mut size_positions: ~[uint],
 }

-fn write_sized_vuint(w: io::Writer, n: uint, size: uint) {
+fn write_sized_vuint(w: @io::Writer, n: uint, size: uint) {
 match size {
 1u => w.write(&[0x80u8 | (n as u8)]),
 2u => w.write(&[0x40u8 | ((n >> 8_u) as u8), n as u8]),

@@ -431,7 +431,7 @@ pub mod writer {
 };
 }

-fn write_vuint(w: io::Writer, n: uint) {
+fn write_vuint(w: @io::Writer, n: uint) {
 if n < 0x7f_u { write_sized_vuint(w, n, 1u); return; }
 if n < 0x4000_u { write_sized_vuint(w, n, 2u); return; }
 if n < 0x200000_u { write_sized_vuint(w, n, 3u); return; }

@@ -439,7 +439,7 @@ pub mod writer {
 fail!(fmt!("vint to write too big: %?", n));
 }

-pub fn Encoder(w: io::Writer) -> Encoder {
+pub fn Encoder(w: @io::Writer) -> Encoder {
 let size_positions: ~[uint] = ~[];
 Encoder { writer: w, mut size_positions: size_positions }
 }
@@ -459,15 +459,15 @@ pub mod flatteners {
 }

 pub trait FromReader {
-static fn from_reader(r: Reader) -> Self;
+static fn from_reader(r: @Reader) -> Self;
 }

 pub trait FromWriter {
-static fn from_writer(w: Writer) -> Self;
+static fn from_writer(w: @Writer) -> Self;
 }

 impl FromReader for json::Decoder/&self {
-static fn from_reader(r: Reader) -> json::Decoder/&self {
+static fn from_reader(r: @Reader) -> json::Decoder/&self {
 match json::from_reader(r) {
 Ok(json) => {
 json::Decoder(json)

@@ -478,13 +478,13 @@ pub mod flatteners {
 }

 impl FromWriter for json::Encoder {
-static fn from_writer(w: Writer) -> json::Encoder {
+static fn from_writer(w: @Writer) -> json::Encoder {
 json::Encoder(w)
 }
 }

 impl FromReader for ebml::reader::Decoder {
-static fn from_reader(r: Reader) -> ebml::reader::Decoder {
+static fn from_reader(r: @Reader) -> ebml::reader::Decoder {
 let buf = @r.read_whole_stream();
 let doc = ebml::reader::Doc(buf);
 ebml::reader::Decoder(doc)

@@ -492,7 +492,7 @@ pub mod flatteners {
 }

 impl FromWriter for ebml::writer::Encoder {
-static fn from_writer(w: Writer) -> ebml::writer::Encoder {
+static fn from_writer(w: @Writer) -> ebml::writer::Encoder {
 ebml::writer::Encoder(w)
 }
 }
@@ -74,10 +74,10 @@ fn spaces(n: uint) -> ~str {
 }

 pub struct Encoder {
-priv wr: io::Writer,
+priv wr: @io::Writer,
 }

-pub fn Encoder(wr: io::Writer) -> Encoder {
+pub fn Encoder(wr: @io::Writer) -> Encoder {
 Encoder { wr: wr }
 }

@@ -208,11 +208,11 @@ impl serialize::Encoder for Encoder {
 }

 pub struct PrettyEncoder {
-priv wr: io::Writer,
+priv wr: @io::Writer,
 priv mut indent: uint,
 }

-pub fn PrettyEncoder(wr: io::Writer) -> PrettyEncoder {
+pub fn PrettyEncoder(wr: @io::Writer) -> PrettyEncoder {
 PrettyEncoder { wr: wr, indent: 0 }
 }

@@ -346,7 +346,7 @@ impl<S:serialize::Encoder> serialize::Encodable<S> for Json {
 }

 /// Encodes a json value into a io::writer
-pub fn to_writer(wr: io::Writer, json: &Json) {
+pub fn to_writer(wr: @io::Writer, json: &Json) {
 json.encode(&Encoder(wr))
 }

@@ -359,7 +359,7 @@ pub pure fn to_str(json: &Json) -> ~str {
 }

 /// Encodes a json value into a io::writer
-pub fn to_pretty_writer(wr: io::Writer, json: &Json) {
+pub fn to_pretty_writer(wr: @io::Writer, json: &Json) {
 json.encode(&PrettyEncoder(wr))
 }

@@ -369,14 +369,14 @@ pub fn to_pretty_str(json: &Json) -> ~str {
 }

 pub struct Parser {
-priv rdr: io::Reader,
+priv rdr: @io::Reader,
 priv mut ch: char,
 priv mut line: uint,
 priv mut col: uint,
 }

 /// Decode a json value from an io::reader
-pub fn Parser(rdr: io::Reader) -> Parser {
+pub fn Parser(rdr: @io::Reader) -> Parser {
 Parser {
 rdr: rdr,
 ch: rdr.read_char(),

@@ -734,8 +734,8 @@ priv impl Parser {
 }
 }

-/// Decodes a json value from an io::reader
-pub fn from_reader(rdr: io::Reader) -> Result<Json, Error> {
+/// Decodes a json value from an @io::Reader
+pub fn from_reader(rdr: @io::Reader) -> Result<Json, Error> {
 Parser(rdr).parse()
 }
@@ -1799,7 +1799,7 @@ pub mod test {
 let sock_buf = @socket_buf(result::unwrap(conn_result));
 buf_write(sock_buf, expected_req);

-let buf_reader = sock_buf as Reader;
+let buf_reader = sock_buf as @Reader;
 let actual_response = str::from_bytes(buf_reader.read_whole_stream());
 debug!("Actual response: %s", actual_response);
 fail_unless!(expected_resp == actual_response);
@@ -326,7 +326,7 @@ pub mod chained {
 }

 pub impl<K:Eq + IterBytes + Hash + Copy + ToStr,V:ToStr + Copy> T<K, V> {
-fn to_writer(&self, wr: io::Writer) {
+fn to_writer(&self, wr: @io::Writer) {
 if self.count == 0u {
 wr.write_str(~"{}");
 return;
@@ -14,10 +14,10 @@ use core::io::WriterUtil;
 use core::io;

 pub struct Serializer {
-wr: io::Writer,
+wr: @io::Writer,
 }

-pub fn Serializer(wr: io::Writer) -> Serializer {
+pub fn Serializer(wr: @io::Writer) -> Serializer {
 Serializer { wr: wr }
 }
@@ -138,7 +138,7 @@ condition! {
 bad_parse: () -> ();
 }

-fn take_nonempty_prefix(rdr: io::Reader,
+fn take_nonempty_prefix(rdr: @io::Reader,
 ch: char,
 pred: &fn(char) -> bool) -> (~str, char) {
 let mut buf = ~"";

@@ -154,7 +154,7 @@ fn take_nonempty_prefix(rdr: io::Reader,
 (buf, ch)
 }

-fn take_num(rdr: io::Reader, ch: char) -> (uint, char) {
+fn take_num(rdr: @io::Reader, ch: char) -> (uint, char) {
 let (s, ch) = take_nonempty_prefix(rdr, ch, char::is_digit);
 match uint::from_str(s) {
 None => { bad_parse::cond.raise(()); (0, ch) },

@@ -162,7 +162,7 @@ fn take_num(rdr: io::Reader, ch: char) -> (uint, char) {
 }
 }

-fn take_ident(rdr: io::Reader, ch: char) -> (Identifier, char) {
+fn take_ident(rdr: @io::Reader, ch: char) -> (Identifier, char) {
 let (s,ch) = take_nonempty_prefix(rdr, ch, char::is_alphanumeric);
 if s.all(char::is_digit) {
 match uint::from_str(s) {

@@ -180,8 +180,7 @@ fn expect(ch: char, c: char) {
 }
 }

-fn parse_reader(rdr: io::Reader) -> Version {
-
+fn parse_reader(rdr: @io::Reader) -> Version {
 let (major, ch) = take_num(rdr, rdr.read_char());
 expect(ch, '.');
 let (minor, ch) = take_num(rdr, rdr.read_char());
@@ -893,6 +893,7 @@ mod test_tim_sort {

 use sort::tim_sort;

+use core::rand::RngUtil;
 use core::rand;
 use core::vec;

@@ -990,6 +991,7 @@ mod big_tests {

 use sort::*;

+use core::rand::RngUtil;
 use core::rand;
 use core::task;
 use core::uint;
@@ -12,6 +12,7 @@

 use core::os;
 use core::prelude::*;
+use core::rand::RngUtil;
 use core::rand;

 pub fn mkdtemp(tmpdir: &Path, suffix: &str) -> Option<Path> {
@@ -36,10 +36,10 @@ pub const color_bright_magenta: u8 = 13u8;
 pub const color_bright_cyan: u8 = 14u8;
 pub const color_bright_white: u8 = 15u8;

-pub fn esc(writer: io::Writer) { writer.write(~[0x1bu8, '[' as u8]); }
+pub fn esc(writer: @io::Writer) { writer.write(~[0x1bu8, '[' as u8]); }

 /// Reset the foreground and background colors to default
-pub fn reset(writer: io::Writer) {
+pub fn reset(writer: @io::Writer) {
 esc(writer);
 writer.write(~['0' as u8, 'm' as u8]);
 }

@@ -59,7 +59,7 @@ pub fn color_supported() -> bool {
 };
 }

-pub fn set_color(writer: io::Writer, first_char: u8, color: u8) {
+pub fn set_color(writer: @io::Writer, first_char: u8, color: u8) {
 fail_unless!((color < 16u8));
 esc(writer);
 let mut color = color;

@@ -68,12 +68,12 @@ pub fn set_color(writer: io::Writer, first_char: u8, color: u8) {
 }

 /// Set the foreground color
-pub fn fg(writer: io::Writer, color: u8) {
+pub fn fg(writer: @io::Writer, color: u8) {
 return set_color(writer, '3' as u8, color);
 }

 /// Set the background color
-pub fn bg(writer: io::Writer, color: u8) {
+pub fn bg(writer: @io::Writer, color: u8) {
 return set_color(writer, '4' as u8, color);
 }
@@ -198,8 +198,8 @@ pub struct BenchSamples {
 pub enum TestResult { TrOk, TrFailed, TrIgnored, TrBench(BenchSamples) }

 struct ConsoleTestState {
-out: io::Writer,
-log_out: Option<io::Writer>,
+out: @io::Writer,
+log_out: Option<@io::Writer>,
 use_color: bool,
 mut total: uint,
 mut passed: uint,

@@ -316,7 +316,7 @@ pub fn run_tests_console(opts: &TestOpts,
 }
 }

-fn write_log(out: io::Writer, result: TestResult, test: &TestDesc) {
+fn write_log(out: @io::Writer, result: TestResult, test: &TestDesc) {
 out.write_line(fmt!("%s %s",
 match result {
 TrOk => ~"ok",

@@ -326,23 +326,26 @@ pub fn run_tests_console(opts: &TestOpts,
 }, test.name.to_str()));
 }

-fn write_ok(out: io::Writer, use_color: bool) {
+fn write_ok(out: @io::Writer, use_color: bool) {
 write_pretty(out, ~"ok", term::color_green, use_color);
 }

-fn write_failed(out: io::Writer, use_color: bool) {
+fn write_failed(out: @io::Writer, use_color: bool) {
 write_pretty(out, ~"FAILED", term::color_red, use_color);
 }

-fn write_ignored(out: io::Writer, use_color: bool) {
+fn write_ignored(out: @io::Writer, use_color: bool) {
 write_pretty(out, ~"ignored", term::color_yellow, use_color);
 }

-fn write_bench(out: io::Writer, use_color: bool) {
+fn write_bench(out: @io::Writer, use_color: bool) {
 write_pretty(out, ~"bench", term::color_cyan, use_color);
 }

-fn write_pretty(out: io::Writer, word: &str, color: u8, use_color: bool) {
+fn write_pretty(out: @io::Writer,
+word: &str,
+color: u8,
+use_color: bool) {
 if use_color && term::color_supported() {
 term::fg(out, color);
 }

@@ -601,6 +604,7 @@ pub mod bench {
 use stats::Stats;

 use core::num;
+use core::rand::RngUtil;
 use core::rand;
 use core::u64;
 use core::vec;

@@ -700,7 +704,6 @@ pub mod bench {
 let mut prev_madp = 0.0;

 loop {
-
 let n_samples = rng.gen_uint_range(50, 60);
 let n_iter = rng.gen_uint_range(magnitude,
 magnitude * 2);
@@ -179,6 +179,7 @@ mod test {
 use uv;

 use core::iter;
+use core::rand::RngUtil;
 use core::rand;
 use core::task;
 use core::pipes::{stream, SharedChan};
@@ -706,6 +706,7 @@ fn remove<K: TotalOrd, V>(node: &mut Option<~TreeNode<K, V>>,
 mod test_treemap {
 use core::prelude::*;
 use super::*;
+use core::rand::RngUtil;
 use core::rand;

 #[test]
@@ -110,7 +110,7 @@ pub struct Ctx {
 map: @map,
 path: path,
 local_id: uint,
-diag: span_handler,
+diag: @span_handler,
 }

 pub type vt = visit::vt<@mut Ctx>;

@@ -132,7 +132,7 @@ pub fn mk_ast_map_visitor() -> vt {
 });
 }

-pub fn map_crate(diag: span_handler, c: crate) -> map {
+pub fn map_crate(diag: @span_handler, c: crate) -> map {
 let cx = @mut Ctx {
 map: @std::oldmap::HashMap(),
 path: ~[],

@@ -146,7 +146,7 @@ pub fn map_crate(diag: span_handler, c: crate) -> map {
 // Used for items loaded from external crate that are being inlined into this
 // crate. The `path` should be the path to the item but should not include
 // the item itself.
-pub fn map_decoded_item(diag: span_handler,
+pub fn map_decoded_item(diag: @span_handler,
 map: map,
 +path: path,
 ii: inlined_item) {
@@ -360,7 +360,7 @@ pub fn find_inline_attr(attrs: &[ast::attribute]) -> inline_attr {
 }


-pub fn require_unique_names(diagnostic: span_handler,
+pub fn require_unique_names(diagnostic: @span_handler,
 metas: &[@ast::meta_item]) {
 let mut set = LinearSet::new();
 for metas.each |meta| {
@@ -55,7 +55,7 @@ pub trait span_handler {
 fn span_note(@mut self, sp: span, msg: &str);
 fn span_bug(@mut self, sp: span, msg: &str) -> !;
 fn span_unimpl(@mut self, sp: span, msg: &str) -> !;
-fn handler(@mut self) -> handler;
+fn handler(@mut self) -> @handler;
 }

 struct HandlerT {

@@ -64,7 +64,7 @@ struct HandlerT {
 }

 struct CodemapT {
-handler: handler,
+handler: @handler,
 cm: @codemap::CodeMap,
 }

@@ -89,7 +89,7 @@ impl span_handler for CodemapT {
 fn span_unimpl(@mut self, sp: span, msg: &str) -> ! {
 self.span_bug(sp, ~"unimplemented " + msg);
 }
-fn handler(@mut self) -> handler {
+fn handler(@mut self) -> @handler {
 self.handler
 }
 }

@@ -143,8 +143,8 @@ pub fn ice_msg(msg: &str) -> ~str {
 fmt!("internal compiler error: %s", msg)
 }

-pub fn mk_span_handler(handler: handler, cm: @codemap::CodeMap)
--> span_handler {
+pub fn mk_span_handler(handler: @handler, cm: @codemap::CodeMap)
+-> @span_handler {
 @mut CodemapT { handler: handler, cm: cm } as @span_handler
 }
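The body of `mk_span_handler` above shows the construction pattern the new rule expects: the concrete `@mut` box is cast with an explicitly `@`-sigiled trait type. A generic sketch of that shape, with placeholder names that are not part of the patch:

~~~~
trait Logger { fn log(&self, msg: &str); }
struct SimpleLogger { enabled: bool }
impl Logger for SimpleLogger {
    fn log(&self, msg: &str) { if self.enabled { io::println(msg) } }
}

fn mk_logger() -> @Logger {
    // The cast names the sigiled trait type explicitly, as now required.
    @SimpleLogger { enabled: true } as @Logger
}
~~~~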
@@ -304,7 +304,7 @@ fn print_macro_backtrace(cm: @codemap::CodeMap, sp: span) {
 }
 }

-pub fn expect<T:Copy>(diag: span_handler,
+pub fn expect<T:Copy>(diag: @span_handler,
 opt: Option<T>,
 msg: &fn() -> ~str) -> T {
 match opt {
@@ -41,10 +41,10 @@ fn next_state(s: State) -> Option<State> {
 }
 }

-pub fn expand_asm(cx: ext_ctxt, sp: span, tts: &[ast::token_tree])
+pub fn expand_asm(cx: @ext_ctxt, sp: span, tts: &[ast::token_tree])
 -> base::MacResult {
-let p = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(),
+let p = parse::new_parser_from_tts(cx.parse_sess(),
+cx.cfg(),
 vec::from_slice(tts));

 let mut asm = ~"";
@@ -110,7 +110,7 @@ mod syntax {
 }

 pub fn expand_auto_encode(
-cx: ext_ctxt,
+cx: @ext_ctxt,
 span: span,
 _mitem: @ast::meta_item,
 in_items: ~[@ast::item]

@@ -165,7 +165,7 @@ pub fn expand_auto_encode(
 }

 pub fn expand_auto_decode(
-cx: ext_ctxt,
+cx: @ext_ctxt,
 span: span,
 _mitem: @ast::meta_item,
 in_items: ~[@ast::item]

@@ -219,7 +219,7 @@ pub fn expand_auto_decode(
 }
 }

-priv impl ext_ctxt {
+priv impl @ext_ctxt {
 fn bind_path(
 &self,
 span: span,

@@ -426,7 +426,7 @@ priv impl ext_ctxt {
 }

 fn mk_impl(
-cx: ext_ctxt,
+cx: @ext_ctxt,
 span: span,
 ident: ast::ident,
 ty_param: ast::TyParam,

@@ -499,7 +499,7 @@ fn mk_impl(
 }

 fn mk_ser_impl(
-cx: ext_ctxt,
+cx: @ext_ctxt,
 span: span,
 ident: ast::ident,
 generics: &ast::Generics,

@@ -543,7 +543,7 @@ fn mk_ser_impl(
 }

 fn mk_deser_impl(
-cx: ext_ctxt,
+cx: @ext_ctxt,
 span: span,
 ident: ast::ident,
 generics: &ast::Generics,

@@ -587,7 +587,7 @@ fn mk_deser_impl(
 }

 fn mk_ser_method(
-cx: ext_ctxt,
+cx: @ext_ctxt,
 span: span,
 +ser_body: ast::blk
 ) -> @ast::method {

@@ -647,7 +647,7 @@ fn mk_ser_method(
 }

 fn mk_deser_method(
-cx: ext_ctxt,
+cx: @ext_ctxt,
 span: span,
 ty: @ast::Ty,
 +deser_body: ast::blk

@@ -701,7 +701,7 @@ fn mk_deser_method(
 }

 fn mk_struct_ser_impl(
-cx: ext_ctxt,
+cx: @ext_ctxt,
 span: span,
 ident: ast::ident,
 fields: &[@ast::struct_field],

@@ -762,7 +762,7 @@ fn mk_struct_ser_impl(
 }

 fn mk_struct_deser_impl(
-cx: ext_ctxt,
+cx: @ext_ctxt,
 span: span,
 ident: ast::ident,
 fields: ~[@ast::struct_field],

@@ -866,7 +866,7 @@ fn mk_struct_fields(fields: &[@ast::struct_field]) -> ~[field] {
 }

 fn mk_enum_ser_impl(
-cx: ext_ctxt,
+cx: @ext_ctxt,
 span: span,
 ident: ast::ident,
 +enum_def: ast::enum_def,

@@ -883,7 +883,7 @@ fn mk_enum_ser_impl(
 }

 fn mk_enum_deser_impl(
-cx: ext_ctxt,
+cx: @ext_ctxt,
 span: span,
 ident: ast::ident,
 +enum_def: ast::enum_def,

@@ -900,7 +900,7 @@ fn mk_enum_deser_impl(
 }

 fn ser_variant(
-cx: ext_ctxt,
+cx: @ext_ctxt,
 span: span,
 v_name: ast::ident,
 v_idx: uint,

@@ -982,7 +982,7 @@ fn ser_variant(
 }

 fn mk_enum_ser_body(
-cx: ext_ctxt,
+cx: @ext_ctxt,
 span: span,
 name: ast::ident,
 +variants: ~[ast::variant]

@@ -1032,7 +1032,7 @@ fn mk_enum_ser_body(
 }

 fn mk_enum_deser_variant_nary(
-cx: ext_ctxt,
+cx: @ext_ctxt,
 span: span,
 name: ast::ident,
 args: ~[ast::variant_arg]

@@ -1069,7 +1069,7 @@ fn mk_enum_deser_variant_nary(
 }

 fn mk_enum_deser_body(
-ext_cx: ext_ctxt,
+ext_cx: @ext_ctxt,
 span: span,
 name: ast::ident,
 variants: ~[ast::variant]
@@ -36,7 +36,7 @@ pub struct MacroDef {
 ext: SyntaxExtension
 }

-pub type ItemDecorator = @fn(ext_ctxt,
+pub type ItemDecorator = @fn(@ext_ctxt,
 span,
 @ast::meta_item,
 ~[@ast::item])

@@ -47,7 +47,7 @@ pub struct SyntaxExpanderTT {
 span: Option<span>
 }

-pub type SyntaxExpanderTTFun = @fn(ext_ctxt,
+pub type SyntaxExpanderTTFun = @fn(@ext_ctxt,
 span,
 &[ast::token_tree])
 -> MacResult;

@@ -57,7 +57,7 @@ pub struct SyntaxExpanderTTItem {
 span: Option<span>
 }

-pub type SyntaxExpanderTTItemFun = @fn(ext_ctxt,
+pub type SyntaxExpanderTTItemFun = @fn(@ext_ctxt,
 span,
 ast::ident,
 ~[ast::token_tree])

@@ -238,8 +238,8 @@ pub trait ext_ctxt {
 fn ident_of(@mut self, st: ~str) -> ast::ident;
 }

-pub fn mk_ctxt(parse_sess: @mut parse::ParseSess,
-+cfg: ast::crate_cfg) -> ext_ctxt {
+pub fn mk_ctxt(parse_sess: @mut parse::ParseSess, +cfg: ast::crate_cfg)
+-> @ext_ctxt {
 struct CtxtRepr {
 parse_sess: @mut parse::ParseSess,
 cfg: ast::crate_cfg,

@@ -333,7 +333,7 @@ pub fn mk_ctxt(parse_sess: @mut parse::ParseSess,
 ((imp) as @ext_ctxt)
 }

-pub fn expr_to_str(cx: ext_ctxt, expr: @ast::expr, err_msg: ~str) -> ~str {
+pub fn expr_to_str(cx: @ext_ctxt, expr: @ast::expr, err_msg: ~str) -> ~str {
 match expr.node {
 ast::expr_lit(l) => match l.node {
 ast::lit_str(s) => copy *s,

@@ -343,7 +343,7 @@ pub fn expr_to_str(cx: ext_ctxt, expr: @ast::expr, err_msg: ~str) -> ~str {
 }
 }

-pub fn expr_to_ident(cx: ext_ctxt,
+pub fn expr_to_ident(cx: @ext_ctxt,
 expr: @ast::expr,
 err_msg: ~str) -> ast::ident {
 match expr.node {

@@ -357,14 +357,14 @@ pub fn expr_to_ident(cx: ext_ctxt,
 }
 }

-pub fn check_zero_tts(cx: ext_ctxt, sp: span, tts: &[ast::token_tree],
+pub fn check_zero_tts(cx: @ext_ctxt, sp: span, tts: &[ast::token_tree],
 name: &str) {
 if tts.len() != 0 {
 cx.span_fatal(sp, fmt!("%s takes no arguments", name));
 }
 }

-pub fn get_single_str_from_tts(cx: ext_ctxt,
+pub fn get_single_str_from_tts(cx: @ext_ctxt,
 sp: span,
 tts: &[ast::token_tree],
 name: &str) -> ~str {

@@ -379,7 +379,7 @@ pub fn get_single_str_from_tts(cx: ext_ctxt,
 }
 }

-pub fn get_exprs_from_tts(cx: ext_ctxt, tts: &[ast::token_tree])
+pub fn get_exprs_from_tts(cx: @ext_ctxt, tts: &[ast::token_tree])
 -> ~[@ast::expr] {
 let p = parse::new_parser_from_tts(cx.parse_sess(),
 cx.cfg(),
@@ -25,7 +25,7 @@ pub struct Field {
 ex: @ast::expr
 }

-pub fn mk_expr(cx: ext_ctxt,
+pub fn mk_expr(cx: @ext_ctxt,
 sp: codemap::span,
 +expr: ast::expr_)
 -> @ast::expr {

@@ -37,28 +37,28 @@ pub fn mk_expr(cx: ext_ctxt,
 }
 }

-pub fn mk_lit(cx: ext_ctxt, sp: span, lit: ast::lit_) -> @ast::expr {
+pub fn mk_lit(cx: @ext_ctxt, sp: span, lit: ast::lit_) -> @ast::expr {
 let sp_lit = @codemap::spanned { node: lit, span: sp };
 mk_expr(cx, sp, ast::expr_lit(sp_lit))
 }
-pub fn mk_int(cx: ext_ctxt, sp: span, i: int) -> @ast::expr {
+pub fn mk_int(cx: @ext_ctxt, sp: span, i: int) -> @ast::expr {
 let lit = ast::lit_int(i as i64, ast::ty_i);
 return mk_lit(cx, sp, lit);
 }
-pub fn mk_uint(cx: ext_ctxt, sp: span, u: uint) -> @ast::expr {
+pub fn mk_uint(cx: @ext_ctxt, sp: span, u: uint) -> @ast::expr {
 let lit = ast::lit_uint(u as u64, ast::ty_u);
 return mk_lit(cx, sp, lit);
 }
-pub fn mk_u8(cx: ext_ctxt, sp: span, u: u8) -> @ast::expr {
+pub fn mk_u8(cx: @ext_ctxt, sp: span, u: u8) -> @ast::expr {
 let lit = ast::lit_uint(u as u64, ast::ty_u8);
 return mk_lit(cx, sp, lit);
 }
-pub fn mk_binary(cx: ext_ctxt, sp: span, op: ast::binop,
+pub fn mk_binary(cx: @ext_ctxt, sp: span, op: ast::binop,
 lhs: @ast::expr, rhs: @ast::expr) -> @ast::expr {
 cx.next_id(); // see ast_util::op_expr_callee_id
 mk_expr(cx, sp, ast::expr_binary(op, lhs, rhs))
 }
-pub fn mk_unary(cx: ext_ctxt, sp: span, op: ast::unop, e: @ast::expr)
+pub fn mk_unary(cx: @ext_ctxt, sp: span, op: ast::unop, e: @ast::expr)
 -> @ast::expr {
 cx.next_id(); // see ast_util::op_expr_callee_id
 mk_expr(cx, sp, ast::expr_unary(op, e))

@@ -88,69 +88,70 @@ pub fn mk_raw_path_global(sp: span, +idents: ~[ast::ident]) -> @ast::path {
 rp: None,
 types: ~[] }
 }
-pub fn mk_path(cx: ext_ctxt, sp: span, +idents: ~[ast::ident]) -> @ast::expr {
+pub fn mk_path(cx: @ext_ctxt, sp: span, +idents: ~[ast::ident])
+-> @ast::expr {
 mk_expr(cx, sp, ast::expr_path(mk_raw_path(sp, idents)))
 }
-pub fn mk_path_global(cx: ext_ctxt, sp: span, +idents: ~[ast::ident])
+pub fn mk_path_global(cx: @ext_ctxt, sp: span, +idents: ~[ast::ident])
 -> @ast::expr {
 mk_expr(cx, sp, ast::expr_path(mk_raw_path_global(sp, idents)))
 }
-pub fn mk_access_(cx: ext_ctxt, sp: span, p: @ast::expr, m: ast::ident)
+pub fn mk_access_(cx: @ext_ctxt, sp: span, p: @ast::expr, m: ast::ident)
 -> @ast::expr {
 mk_expr(cx, sp, ast::expr_field(p, m, ~[]))
 }
-pub fn mk_access(cx: ext_ctxt, sp: span, +p: ~[ast::ident], m: ast::ident)
+pub fn mk_access(cx: @ext_ctxt, sp: span, +p: ~[ast::ident], m: ast::ident)
 -> @ast::expr {
 let pathexpr = mk_path(cx, sp, p);
 return mk_access_(cx, sp, pathexpr, m);
 }
-pub fn mk_addr_of(cx: ext_ctxt, sp: span, e: @ast::expr) -> @ast::expr {
+pub fn mk_addr_of(cx: @ext_ctxt, sp: span, e: @ast::expr) -> @ast::expr {
 return mk_expr(cx, sp, ast::expr_addr_of(ast::m_imm, e));
 }
-pub fn mk_call_(cx: ext_ctxt, sp: span, fn_expr: @ast::expr,
+pub fn mk_call_(cx: @ext_ctxt, sp: span, fn_expr: @ast::expr,
 +args: ~[@ast::expr]) -> @ast::expr {
 mk_expr(cx, sp, ast::expr_call(fn_expr, args, ast::NoSugar))
 }
-pub fn mk_call(cx: ext_ctxt, sp: span, +fn_path: ~[ast::ident],
+pub fn mk_call(cx: @ext_ctxt, sp: span, +fn_path: ~[ast::ident],
 +args: ~[@ast::expr]) -> @ast::expr {
 let pathexpr = mk_path(cx, sp, fn_path);
 return mk_call_(cx, sp, pathexpr, args);
 }
-pub fn mk_call_global(cx: ext_ctxt, sp: span, +fn_path: ~[ast::ident],
+pub fn mk_call_global(cx: @ext_ctxt, sp: span, +fn_path: ~[ast::ident],
 +args: ~[@ast::expr]) -> @ast::expr {
 let pathexpr = mk_path_global(cx, sp, fn_path);
 return mk_call_(cx, sp, pathexpr, args);
 }
 // e = expr, t = type
-pub fn mk_base_vec_e(cx: ext_ctxt, sp: span, +exprs: ~[@ast::expr])
+pub fn mk_base_vec_e(cx: @ext_ctxt, sp: span, +exprs: ~[@ast::expr])
 -> @ast::expr {
 let vecexpr = ast::expr_vec(exprs, ast::m_imm);
 mk_expr(cx, sp, vecexpr)
 }
-pub fn mk_vstore_e(cx: ext_ctxt, sp: span, expr: @ast::expr,
+pub fn mk_vstore_e(cx: @ext_ctxt, sp: span, expr: @ast::expr,
 vst: ast::expr_vstore) ->
 @ast::expr {
 mk_expr(cx, sp, ast::expr_vstore(expr, vst))
 }
-pub fn mk_uniq_vec_e(cx: ext_ctxt, sp: span, +exprs: ~[@ast::expr])
+pub fn mk_uniq_vec_e(cx: @ext_ctxt, sp: span, +exprs: ~[@ast::expr])
 -> @ast::expr {
 mk_vstore_e(cx, sp, mk_base_vec_e(cx, sp, exprs), ast::expr_vstore_uniq)
 }
-pub fn mk_slice_vec_e(cx: ext_ctxt, sp: span, +exprs: ~[@ast::expr])
+pub fn mk_slice_vec_e(cx: @ext_ctxt, sp: span, +exprs: ~[@ast::expr])
 -> @ast::expr {
 mk_vstore_e(cx, sp, mk_base_vec_e(cx, sp, exprs),
 ast::expr_vstore_slice)
 }
-pub fn mk_fixed_vec_e(cx: ext_ctxt, sp: span, +exprs: ~[@ast::expr])
+pub fn mk_fixed_vec_e(cx: @ext_ctxt, sp: span, +exprs: ~[@ast::expr])
 -> @ast::expr {
 mk_vstore_e(cx, sp, mk_base_vec_e(cx, sp, exprs),
 ast::expr_vstore_fixed(None))
 }
-pub fn mk_base_str(cx: ext_ctxt, sp: span, +s: ~str) -> @ast::expr {
+pub fn mk_base_str(cx: @ext_ctxt, sp: span, +s: ~str) -> @ast::expr {
 let lit = ast::lit_str(@s);
 return mk_lit(cx, sp, lit);
 }
-pub fn mk_uniq_str(cx: ext_ctxt, sp: span, +s: ~str) -> @ast::expr {
+pub fn mk_uniq_str(cx: @ext_ctxt, sp: span, +s: ~str) -> @ast::expr {
 mk_vstore_e(cx, sp, mk_base_str(cx, sp, s), ast::expr_vstore_uniq)
 }
 pub fn mk_field(sp: span, f: &Field) -> ast::field {

@@ -162,7 +163,7 @@ pub fn mk_field(sp: span, f: &Field) -> ast::field {
 pub fn mk_fields(sp: span, fields: ~[Field]) -> ~[ast::field] {
 fields.map(|f| mk_field(sp, f))
 }
-pub fn mk_struct_e(cx: ext_ctxt,
+pub fn mk_struct_e(cx: @ext_ctxt,
 sp: span,
 +ctor_path: ~[ast::ident],
 +fields: ~[Field])

@@ -172,7 +173,7 @@ pub fn mk_struct_e(cx: ext_ctxt,
 mk_fields(sp, fields),
 option::None::<@ast::expr>))
 }
-pub fn mk_global_struct_e(cx: ext_ctxt,
+pub fn mk_global_struct_e(cx: @ext_ctxt,
 sp: span,
 +ctor_path: ~[ast::ident],
 +fields: ~[Field])

@@ -182,7 +183,7 @@ pub fn mk_global_struct_e(cx: ext_ctxt,
 mk_fields(sp, fields),
 option::None::<@ast::expr>))
 }
-pub fn mk_glob_use(cx: ext_ctxt,
+pub fn mk_glob_use(cx: @ext_ctxt,
 sp: span,
 +path: ~[ast::ident]) -> @ast::view_item {
 let glob = @codemap::spanned {

@@ -194,7 +195,7 @@ pub fn mk_glob_use(cx: ext_ctxt,
 vis: ast::private,
 span: sp }
 }
-pub fn mk_local(cx: ext_ctxt, sp: span, mutbl: bool,
+pub fn mk_local(cx: @ext_ctxt, sp: span, mutbl: bool,
 ident: ast::ident, ex: @ast::expr) -> @ast::stmt {

 let pat = @ast::pat {

@@ -219,7 +220,7 @@ pub fn mk_local(cx: ext_ctxt, sp: span, mutbl: bool,
 let decl = codemap::spanned {node: ast::decl_local(~[local]), span: sp};
 @codemap::spanned { node: ast::stmt_decl(@decl, cx.next_id()), span: sp }
 }
-pub fn mk_block(cx: ext_ctxt, span: span,
+pub fn mk_block(cx: @ext_ctxt, span: span,
 +view_items: ~[@ast::view_item],
 +stmts: ~[@ast::stmt],
 expr: Option<@ast::expr>) -> @ast::expr {

@@ -235,7 +236,7 @@ pub fn mk_block(cx: ext_ctxt, span: span,
 };
 mk_expr(cx, span, ast::expr_block(blk))
 }
-pub fn mk_block_(cx: ext_ctxt,
+pub fn mk_block_(cx: @ext_ctxt,
 span: span,
 +stmts: ~[@ast::stmt])
 -> ast::blk {

@@ -250,7 +251,7 @@ pub fn mk_block_(cx: ext_ctxt,
 span: span,
 }
 }
-pub fn mk_simple_block(cx: ext_ctxt,
+pub fn mk_simple_block(cx: @ext_ctxt,
 span: span,
 expr: @ast::expr)
 -> ast::blk {

@@ -265,21 +266,21 @@ pub fn mk_simple_block(cx: ext_ctxt,
 span: span,
 }
 }
-pub fn mk_copy(cx: ext_ctxt, sp: span, e: @ast::expr) -> @ast::expr {
+pub fn mk_copy(cx: @ext_ctxt, sp: span, e: @ast::expr) -> @ast::expr {
 mk_expr(cx, sp, ast::expr_copy(e))
 }
-pub fn mk_managed(cx: ext_ctxt, sp: span, e: @ast::expr) -> @ast::expr {
+pub fn mk_managed(cx: @ext_ctxt, sp: span, e: @ast::expr) -> @ast::expr {
 mk_expr(cx, sp, ast::expr_unary(ast::box(ast::m_imm), e))
 }
-pub fn mk_pat(cx: ext_ctxt, span: span, +pat: ast::pat_) -> @ast::pat {
+pub fn mk_pat(cx: @ext_ctxt, span: span, +pat: ast::pat_) -> @ast::pat {
 @ast::pat { id: cx.next_id(), node: pat, span: span }
 }
-pub fn mk_pat_ident(cx: ext_ctxt,
+pub fn mk_pat_ident(cx: @ext_ctxt,
 span: span,
 ident: ast::ident) -> @ast::pat {
 mk_pat_ident_with_binding_mode(cx, span, ident, ast::bind_by_copy)
 }
-pub fn mk_pat_ident_with_binding_mode(cx: ext_ctxt,
+pub fn mk_pat_ident_with_binding_mode(cx: @ext_ctxt,
 span: span,
 ident: ast::ident,
 bm: ast::binding_mode) -> @ast::pat {

@@ -287,7 +288,7 @@ pub fn mk_pat_ident_with_binding_mode(cx: ext_ctxt,
 let pat = ast::pat_ident(bm, path, None);
 mk_pat(cx, span, pat)
 }
-pub fn mk_pat_enum(cx: ext_ctxt,
+pub fn mk_pat_enum(cx: @ext_ctxt,
 span: span,
 path: @ast::path,
 +subpats: ~[@ast::pat])

@@ -295,7 +296,7 @@ pub fn mk_pat_enum(cx: ext_ctxt,
 let pat = ast::pat_enum(path, Some(subpats));
 mk_pat(cx, span, pat)
 }
-pub fn mk_pat_struct(cx: ext_ctxt,
+pub fn mk_pat_struct(cx: @ext_ctxt,
 span: span,
 path: @ast::path,
 +field_pats: ~[ast::field_pat])

@@ -303,17 +304,17 @@ pub fn mk_pat_struct(cx: ext_ctxt,
 let pat = ast::pat_struct(path, field_pats, false);
 mk_pat(cx, span, pat)
 }
-pub fn mk_bool(cx: ext_ctxt, span: span, value: bool) -> @ast::expr {
+pub fn mk_bool(cx: @ext_ctxt, span: span, value: bool) -> @ast::expr {
 let lit_expr = ast::expr_lit(@codemap::spanned {
 node: ast::lit_bool(value),
 span: span });
 build::mk_expr(cx, span, lit_expr)
 }
-pub fn mk_stmt(cx: ext_ctxt, span: span, expr: @ast::expr) -> @ast::stmt {
+pub fn mk_stmt(cx: @ext_ctxt, span: span, expr: @ast::expr) -> @ast::stmt {
 let stmt_ = ast::stmt_semi(expr, cx.next_id());
 @codemap::spanned { node: stmt_, span: span }
 }
-pub fn mk_ty_path(cx: ext_ctxt,
+pub fn mk_ty_path(cx: @ext_ctxt,
 span: span,
 +idents: ~[ ast::ident ])
 -> @ast::Ty {

@@ -322,7 +323,7 @@ pub fn mk_ty_path(cx: ext_ctxt,
 let ty = @ast::Ty { id: cx.next_id(), node: ty, span: span };
 ty
 }
-pub fn mk_ty_path_global(cx: ext_ctxt,
+pub fn mk_ty_path_global(cx: @ext_ctxt,
 span: span,
 +idents: ~[ ast::ident ])
 -> @ast::Ty {

@@ -331,13 +332,13 @@ pub fn mk_ty_path_global(cx: ext_ctxt,
 let ty = @ast::Ty { id: cx.next_id(), node: ty, span: span };
 ty
 }
-pub fn mk_simple_ty_path(cx: ext_ctxt,
+pub fn mk_simple_ty_path(cx: @ext_ctxt,
 span: span,
 ident: ast::ident)
 -> @ast::Ty {
 mk_ty_path(cx, span, ~[ ident ])
 }
-pub fn mk_arg(cx: ext_ctxt,
+pub fn mk_arg(cx: @ext_ctxt,
 span: span,
 ident: ast::ident,
 ty: @ast::Ty)

@@ -354,13 +355,13 @@ pub fn mk_arg(cx: ext_ctxt,
 pub fn mk_fn_decl(+inputs: ~[ast::arg], output: @ast::Ty) -> ast::fn_decl {
 ast::fn_decl { inputs: inputs, output: output, cf: ast::return_val }
 }
-pub fn mk_ty_param(cx: ext_ctxt,
+pub fn mk_ty_param(cx: @ext_ctxt,
 ident: ast::ident,
 bounds: @OptVec<ast::TyParamBound>)
 -> ast::TyParam {
 ast::TyParam { ident: ident, id: cx.next_id(), bounds: bounds }
 }
-pub fn mk_lifetime(cx: ext_ctxt,
+pub fn mk_lifetime(cx: @ext_ctxt,
 span: span,
 ident: ast::ident) -> ast::Lifetime
 {
@@ -16,7 +16,7 @@ use ext::base::*;
 use ext::base;
 use parse::token;

-pub fn expand_syntax_ext(cx: ext_ctxt, sp: span, tts: &[ast::token_tree])
+pub fn expand_syntax_ext(cx: @ext_ctxt, sp: span, tts: &[ast::token_tree])
 -> base::MacResult {
 let mut res_str = ~"";
 for tts.eachi |i, e| {
@@ -45,18 +45,18 @@ pub impl Junction {
 }
 }

-type ExpandDerivingStructDefFn = &self/fn(ext_ctxt,
+type ExpandDerivingStructDefFn = &self/fn(@ext_ctxt,
 span,
 x: &struct_def,
 ident,
 y: &Generics) -> @item;
-type ExpandDerivingEnumDefFn = &self/fn(ext_ctxt,
+type ExpandDerivingEnumDefFn = &self/fn(@ext_ctxt,
 span,
 x: &enum_def,
 ident,
 y: &Generics) -> @item;

-pub fn expand_meta_deriving(cx: ext_ctxt,
+pub fn expand_meta_deriving(cx: @ext_ctxt,
 _span: span,
 mitem: @meta_item,
 in_items: ~[@item])

@@ -98,7 +98,7 @@ pub fn expand_meta_deriving(cx: ext_ctxt,
 }
 }

-pub fn expand_deriving_eq(cx: ext_ctxt,
+pub fn expand_deriving_eq(cx: @ext_ctxt,
 span: span,
 _mitem: @meta_item,
 in_items: ~[@item])

@@ -110,7 +110,7 @@ pub fn expand_deriving_eq(cx: ext_ctxt,
 expand_deriving_eq_enum_def)
 }

-pub fn expand_deriving_iter_bytes(cx: ext_ctxt,
+pub fn expand_deriving_iter_bytes(cx: @ext_ctxt,
 span: span,
 _mitem: @meta_item,
 in_items: ~[@item])

@@ -122,7 +122,7 @@ pub fn expand_deriving_iter_bytes(cx: ext_ctxt,
 expand_deriving_iter_bytes_enum_def)
 }

-pub fn expand_deriving_clone(cx: ext_ctxt,
+pub fn expand_deriving_clone(cx: @ext_ctxt,
 span: span,
 _: @meta_item,
 in_items: ~[@item])

@@ -134,7 +134,7 @@ pub fn expand_deriving_clone(cx: ext_ctxt,
 expand_deriving_clone_enum_def)
 }

-fn expand_deriving(cx: ext_ctxt,
+fn expand_deriving(cx: @ext_ctxt,
 span: span,
 in_items: ~[@item],
 expand_deriving_struct_def: ExpandDerivingStructDefFn,

@@ -164,7 +164,7 @@ fn expand_deriving(cx: ext_ctxt,
 result
 }

-fn create_impl_item(cx: ext_ctxt, span: span, +item: item_) -> @item {
+fn create_impl_item(cx: @ext_ctxt, span: span, +item: item_) -> @item {
 @ast::item {
 ident: clownshoes_extensions,
 attrs: ~[],

@@ -177,7 +177,7 @@ fn create_impl_item(cx: ext_ctxt, span: span, +item: item_) -> @item {

 /// Creates a method from the given expression, the signature of which
 /// conforms to the `eq` or `ne` method.
-fn create_eq_method(cx: ext_ctxt,
+fn create_eq_method(cx: @ext_ctxt,
 span: span,
 method_ident: ident,
 type_ident: ident,

@@ -236,7 +236,7 @@ fn create_eq_method(cx: ext_ctxt,
 }
 }

-fn create_self_type_with_params(cx: ext_ctxt,
+fn create_self_type_with_params(cx: @ext_ctxt,
 span: span,
 type_ident: ident,
 generics: &Generics)

@@ -258,7 +258,7 @@ fn create_self_type_with_params(cx: ext_ctxt,
 @ast::Ty { id: cx.next_id(), node: self_type, span: span }
 }

-fn create_derived_impl(cx: ext_ctxt,
+fn create_derived_impl(cx: @ext_ctxt,
 span: span,
 type_ident: ident,
 generics: &Generics,

@@ -320,7 +320,7 @@ fn create_derived_impl(cx: ext_ctxt,
 return create_impl_item(cx, span, impl_item);
 }

-fn create_derived_eq_impl(cx: ext_ctxt,
+fn create_derived_eq_impl(cx: @ext_ctxt,
 span: span,
 type_ident: ident,
 generics: &Generics,

@@ -336,7 +336,7 @@ fn create_derived_eq_impl(cx: ext_ctxt,
 create_derived_impl(cx, span, type_ident, generics, methods, trait_path)
 }

-fn create_derived_iter_bytes_impl(cx: ext_ctxt,
+fn create_derived_iter_bytes_impl(cx: @ext_ctxt,
 span: span,
 type_ident: ident,
 generics: &Generics,

@@ -351,7 +351,7 @@ fn create_derived_iter_bytes_impl(cx: ext_ctxt,
 create_derived_impl(cx, span, type_ident, generics, methods, trait_path)
 }

-fn create_derived_clone_impl(cx: ext_ctxt,
+fn create_derived_clone_impl(cx: @ext_ctxt,
 span: span,
 type_ident: ident,
 generics: &Generics,

@@ -368,7 +368,7 @@ fn create_derived_clone_impl(cx: ext_ctxt,

 // Creates a method from the given set of statements conforming to the
 // signature of the `iter_bytes` method.
-fn create_iter_bytes_method(cx: ext_ctxt,
+fn create_iter_bytes_method(cx: @ext_ctxt,
 span: span,
 +statements: ~[@stmt])
 -> @method {

@@ -417,7 +417,7 @@ fn create_iter_bytes_method(cx: ext_ctxt,

 // Creates a method from the given expression conforming to the signature of
 // the `clone` method.
-fn create_clone_method(cx: ext_ctxt,
+fn create_clone_method(cx: @ext_ctxt,
 span: span,
 +type_ident: ast::ident,
 generics: &Generics,

@@ -467,7 +467,7 @@ fn create_clone_method(cx: ext_ctxt,
 }
 }

-fn create_subpatterns(cx: ext_ctxt,
+fn create_subpatterns(cx: @ext_ctxt,
 span: span,
 prefix: ~str,
 n: uint)

@@ -496,7 +496,7 @@ fn is_struct_tuple(struct_def: &struct_def) -> bool {
 })
 }

-fn create_enum_variant_pattern(cx: ext_ctxt,
+fn create_enum_variant_pattern(cx: @ext_ctxt,
 span: span,
 variant: &variant,
 prefix: ~str)

@@ -542,7 +542,7 @@ fn create_enum_variant_pattern(cx: ext_ctxt,
 }
 }

-fn call_substructure_eq_method(cx: ext_ctxt,
+fn call_substructure_eq_method(cx: @ext_ctxt,
 span: span,
 self_field: @expr,
 other_field_ref: @expr,

@@ -571,7 +571,7 @@ fn call_substructure_eq_method(cx: ext_ctxt,
 };
 }

-fn finish_eq_chain_expr(cx: ext_ctxt,
+fn finish_eq_chain_expr(cx: @ext_ctxt,
 span: span,
 chain_expr: Option<@expr>,
chain_expr: Option<@expr>,
|
||||||
junction: Junction)
|
junction: Junction)
|
||||||
|
@ -587,7 +587,7 @@ fn finish_eq_chain_expr(cx: ext_ctxt,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn call_substructure_iter_bytes_method(cx: ext_ctxt,
|
fn call_substructure_iter_bytes_method(cx: @ext_ctxt,
|
||||||
span: span,
|
span: span,
|
||||||
self_field: @expr)
|
self_field: @expr)
|
||||||
-> @stmt {
|
-> @stmt {
|
||||||
|
@ -612,7 +612,7 @@ fn call_substructure_iter_bytes_method(cx: ext_ctxt,
|
||||||
build::mk_stmt(cx, span, self_call)
|
build::mk_stmt(cx, span, self_call)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn call_substructure_clone_method(cx: ext_ctxt,
|
fn call_substructure_clone_method(cx: @ext_ctxt,
|
||||||
span: span,
|
span: span,
|
||||||
self_field: @expr)
|
self_field: @expr)
|
||||||
-> @expr {
|
-> @expr {
|
||||||
|
@ -622,7 +622,7 @@ fn call_substructure_clone_method(cx: ext_ctxt,
|
||||||
build::mk_call_(cx, span, self_method, ~[])
|
build::mk_call_(cx, span, self_method, ~[])
|
||||||
}
|
}
|
||||||
|
|
||||||
fn variant_arg_count(cx: ext_ctxt, span: span, variant: &variant) -> uint {
|
fn variant_arg_count(cx: @ext_ctxt, span: span, variant: &variant) -> uint {
|
||||||
match variant.node.kind {
|
match variant.node.kind {
|
||||||
tuple_variant_kind(ref args) => args.len(),
|
tuple_variant_kind(ref args) => args.len(),
|
||||||
struct_variant_kind(ref struct_def) => struct_def.fields.len(),
|
struct_variant_kind(ref struct_def) => struct_def.fields.len(),
|
||||||
|
@ -632,7 +632,7 @@ fn variant_arg_count(cx: ext_ctxt, span: span, variant: &variant) -> uint {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_deriving_eq_struct_def(cx: ext_ctxt,
|
fn expand_deriving_eq_struct_def(cx: @ext_ctxt,
|
||||||
span: span,
|
span: span,
|
||||||
struct_def: &struct_def,
|
struct_def: &struct_def,
|
||||||
type_ident: ident,
|
type_ident: ident,
|
||||||
|
@ -672,7 +672,7 @@ fn expand_deriving_eq_struct_def(cx: ext_ctxt,
|
||||||
ne_method);
|
ne_method);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_deriving_eq_enum_def(cx: ext_ctxt,
|
fn expand_deriving_eq_enum_def(cx: @ext_ctxt,
|
||||||
span: span,
|
span: span,
|
||||||
enum_definition: &enum_def,
|
enum_definition: &enum_def,
|
||||||
type_ident: ident,
|
type_ident: ident,
|
||||||
|
@ -705,7 +705,7 @@ fn expand_deriving_eq_enum_def(cx: ext_ctxt,
|
||||||
ne_method);
|
ne_method);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_deriving_iter_bytes_struct_def(cx: ext_ctxt,
|
fn expand_deriving_iter_bytes_struct_def(cx: @ext_ctxt,
|
||||||
span: span,
|
span: span,
|
||||||
struct_def: &struct_def,
|
struct_def: &struct_def,
|
||||||
type_ident: ident,
|
type_ident: ident,
|
||||||
|
@ -724,7 +724,7 @@ fn expand_deriving_iter_bytes_struct_def(cx: ext_ctxt,
|
||||||
method);
|
method);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_deriving_iter_bytes_enum_def(cx: ext_ctxt,
|
fn expand_deriving_iter_bytes_enum_def(cx: @ext_ctxt,
|
||||||
span: span,
|
span: span,
|
||||||
enum_definition: &enum_def,
|
enum_definition: &enum_def,
|
||||||
type_ident: ident,
|
type_ident: ident,
|
||||||
|
@ -743,7 +743,7 @@ fn expand_deriving_iter_bytes_enum_def(cx: ext_ctxt,
|
||||||
method);
|
method);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_deriving_clone_struct_def(cx: ext_ctxt,
|
fn expand_deriving_clone_struct_def(cx: @ext_ctxt,
|
||||||
span: span,
|
span: span,
|
||||||
struct_def: &struct_def,
|
struct_def: &struct_def,
|
||||||
type_ident: ident,
|
type_ident: ident,
|
||||||
|
@ -768,7 +768,7 @@ fn expand_deriving_clone_struct_def(cx: ext_ctxt,
|
||||||
create_derived_clone_impl(cx, span, type_ident, generics, method)
|
create_derived_clone_impl(cx, span, type_ident, generics, method)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_deriving_clone_enum_def(cx: ext_ctxt,
|
fn expand_deriving_clone_enum_def(cx: @ext_ctxt,
|
||||||
span: span,
|
span: span,
|
||||||
enum_definition: &enum_def,
|
enum_definition: &enum_def,
|
||||||
type_ident: ident,
|
type_ident: ident,
|
||||||
|
@ -785,7 +785,7 @@ fn expand_deriving_clone_enum_def(cx: ext_ctxt,
|
||||||
create_derived_clone_impl(cx, span, type_ident, generics, method)
|
create_derived_clone_impl(cx, span, type_ident, generics, method)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_deriving_eq_struct_method(cx: ext_ctxt,
|
fn expand_deriving_eq_struct_method(cx: @ext_ctxt,
|
||||||
span: span,
|
span: span,
|
||||||
struct_def: &struct_def,
|
struct_def: &struct_def,
|
||||||
method_ident: ident,
|
method_ident: ident,
|
||||||
|
@ -841,7 +841,7 @@ fn expand_deriving_eq_struct_method(cx: ext_ctxt,
|
||||||
body);
|
body);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_deriving_iter_bytes_struct_method(cx: ext_ctxt,
|
fn expand_deriving_iter_bytes_struct_method(cx: @ext_ctxt,
|
||||||
span: span,
|
span: span,
|
||||||
struct_def: &struct_def)
|
struct_def: &struct_def)
|
||||||
-> @method {
|
-> @method {
|
||||||
|
@ -875,7 +875,7 @@ fn expand_deriving_iter_bytes_struct_method(cx: ext_ctxt,
|
||||||
return create_iter_bytes_method(cx, span, statements);
|
return create_iter_bytes_method(cx, span, statements);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_deriving_clone_struct_method(cx: ext_ctxt,
|
fn expand_deriving_clone_struct_method(cx: @ext_ctxt,
|
||||||
span: span,
|
span: span,
|
||||||
struct_def: &struct_def,
|
struct_def: &struct_def,
|
||||||
type_ident: ident,
|
type_ident: ident,
|
||||||
|
@ -918,7 +918,7 @@ fn expand_deriving_clone_struct_method(cx: ext_ctxt,
|
||||||
create_clone_method(cx, span, type_ident, generics, struct_literal)
|
create_clone_method(cx, span, type_ident, generics, struct_literal)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_deriving_clone_tuple_struct_method(cx: ext_ctxt,
|
fn expand_deriving_clone_tuple_struct_method(cx: @ext_ctxt,
|
||||||
span: span,
|
span: span,
|
||||||
struct_def: &struct_def,
|
struct_def: &struct_def,
|
||||||
type_ident: ident,
|
type_ident: ident,
|
||||||
|
@ -962,7 +962,7 @@ fn expand_deriving_clone_tuple_struct_method(cx: ext_ctxt,
|
||||||
create_clone_method(cx, span, type_ident, generics, self_match_expr)
|
create_clone_method(cx, span, type_ident, generics, self_match_expr)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_deriving_eq_enum_method(cx: ext_ctxt,
|
fn expand_deriving_eq_enum_method(cx: @ext_ctxt,
|
||||||
span: span,
|
span: span,
|
||||||
enum_definition: &enum_def,
|
enum_definition: &enum_def,
|
||||||
method_ident: ident,
|
method_ident: ident,
|
||||||
|
@ -1096,7 +1096,7 @@ fn expand_deriving_eq_enum_method(cx: ext_ctxt,
|
||||||
self_match_expr);
|
self_match_expr);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_deriving_eq_struct_tuple_method(cx: ext_ctxt,
|
fn expand_deriving_eq_struct_tuple_method(cx: @ext_ctxt,
|
||||||
span: span,
|
span: span,
|
||||||
struct_def: &struct_def,
|
struct_def: &struct_def,
|
||||||
method_ident: ident,
|
method_ident: ident,
|
||||||
|
@ -1155,7 +1155,7 @@ fn expand_deriving_eq_struct_tuple_method(cx: ext_ctxt,
|
||||||
type_ident, generics, self_match_expr)
|
type_ident, generics, self_match_expr)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_enum_or_struct_match(cx: ext_ctxt,
|
fn expand_enum_or_struct_match(cx: @ext_ctxt,
|
||||||
span: span,
|
span: span,
|
||||||
arms: ~[ ast::arm ])
|
arms: ~[ ast::arm ])
|
||||||
-> @expr {
|
-> @expr {
|
||||||
|
@ -1166,7 +1166,7 @@ fn expand_enum_or_struct_match(cx: ext_ctxt,
|
||||||
build::mk_expr(cx, span, self_match_expr)
|
build::mk_expr(cx, span, self_match_expr)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_deriving_iter_bytes_enum_method(cx: ext_ctxt,
|
fn expand_deriving_iter_bytes_enum_method(cx: @ext_ctxt,
|
||||||
span: span,
|
span: span,
|
||||||
enum_definition: &enum_def)
|
enum_definition: &enum_def)
|
||||||
-> @method {
|
-> @method {
|
||||||
|
@ -1221,7 +1221,7 @@ fn expand_deriving_iter_bytes_enum_method(cx: ext_ctxt,
|
||||||
create_iter_bytes_method(cx, span, ~[ self_match_stmt ])
|
create_iter_bytes_method(cx, span, ~[ self_match_stmt ])
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_deriving_clone_enum_method(cx: ext_ctxt,
|
fn expand_deriving_clone_enum_method(cx: @ext_ctxt,
|
||||||
span: span,
|
span: span,
|
||||||
enum_definition: &enum_def,
|
enum_definition: &enum_def,
|
||||||
type_ident: ident,
|
type_ident: ident,
|
||||||
|
|
|
@ -23,7 +23,7 @@ use ext::base::*;
use ext::base;
use ext::build::mk_uniq_str;

pub fn expand_syntax_ext(cx: ext_ctxt, sp: span, tts: &[ast::token_tree])
pub fn expand_syntax_ext(cx: @ext_ctxt, sp: span, tts: &[ast::token_tree])
-> base::MacResult {

let var = get_single_str_from_tts(cx, sp, tts, "env!");

@ -26,11 +26,11 @@ use core::option;
use core::vec;

pub fn expand_expr(extsbox: @mut SyntaxEnv,
cx: ext_ctxt,
cx: @ext_ctxt,
e: &expr_,
s: span,
fld: ast_fold,
fld: @ast_fold,
orig: @fn(&expr_, span, ast_fold) -> (expr_, span))
orig: @fn(&expr_, span, @ast_fold) -> (expr_, span))
-> (expr_, span) {
match *e {
// expr_mac should really be expr_ext or something; it's the
@ -112,10 +112,10 @@ pub fn expand_expr(extsbox: @mut SyntaxEnv,
// NB: there is some redundancy between this and expand_item, below, and
// they might benefit from some amount of semantic and language-UI merger.
pub fn expand_mod_items(extsbox: @mut SyntaxEnv,
cx: ext_ctxt,
cx: @ext_ctxt,
module_: &ast::_mod,
fld: ast_fold,
fld: @ast_fold,
orig: @fn(&ast::_mod, ast_fold) -> ast::_mod)
orig: @fn(&ast::_mod, @ast_fold) -> ast::_mod)
-> ast::_mod {
// Fold the contents first:
let module_ = orig(module_, fld);
@ -163,10 +163,10 @@ macro_rules! with_exts_frame (

// When we enter a module, record it, for the sake of `module!`
pub fn expand_item(extsbox: @mut SyntaxEnv,
cx: ext_ctxt,
cx: @ext_ctxt,
it: @ast::item,
fld: ast_fold,
fld: @ast_fold,
orig: @fn(@ast::item, ast_fold) -> Option<@ast::item>)
orig: @fn(@ast::item, @ast_fold) -> Option<@ast::item>)
-> Option<@ast::item> {
// need to do expansion first... it might turn out to be a module.
let maybe_it = match it.node {
@ -239,9 +239,9 @@ macro_rules! without_macro_scoping(
// Support for item-position macro invocations, exactly the same
// logic as for expression-position macro invocations.
pub fn expand_item_mac(+extsbox: @mut SyntaxEnv,
cx: ext_ctxt, &&it: @ast::item,
cx: @ext_ctxt, &&it: @ast::item,
fld: ast_fold) -> Option<@ast::item> {
fld: @ast_fold)
-> Option<@ast::item> {
let (pth, tts) = match it.node {
item_mac(codemap::spanned { node: mac_invoc_tt(pth, ref tts), _}) => {
(pth, copy *tts)
@ -307,11 +307,11 @@ pub fn expand_item_mac(+extsbox: @mut SyntaxEnv,

// expand a stmt
pub fn expand_stmt(extsbox: @mut SyntaxEnv,
cx: ext_ctxt,
cx: @ext_ctxt,
s: &stmt_,
sp: span,
fld: ast_fold,
fld: @ast_fold,
orig: @fn(&stmt_, span, ast_fold) -> (stmt_, span))
orig: @fn(&stmt_, span, @ast_fold) -> (stmt_, span))
-> (stmt_, span) {
let (mac, pth, tts, semi) = match *s {
stmt_mac(ref mac, semi) => {
@ -373,11 +373,11 @@ pub fn expand_stmt(extsbox: @mut SyntaxEnv,

pub fn expand_block(extsbox: @mut SyntaxEnv,
cx: ext_ctxt,
cx: @ext_ctxt,
blk: &blk_,
sp: span,
fld: ast_fold,
fld: @ast_fold,
orig: @fn(&blk_, span, ast_fold) -> (blk_, span))
orig: @fn(&blk_, span, @ast_fold) -> (blk_, span))
-> (blk_, span) {
match (*extsbox).find(&@~" block") {
// no scope limit on macros in this block, no need
@ -395,7 +395,7 @@ pub fn expand_block(extsbox: @mut SyntaxEnv,
}
}

pub fn new_span(cx: ext_ctxt, sp: span) -> span {
pub fn new_span(cx: @ext_ctxt, sp: span) -> span {
/* this discards information in the case of macro-defining macros */
return span {lo: sp.lo, hi: sp.hi, expn_info: cx.backtrace()};
}

@ -488,7 +488,7 @@ pub fn expand_crate(parse_sess: @mut parse::ParseSess,
// every method/element of AstFoldFns in fold.rs.
let extsbox = @mut syntax_expander_table();
let afp = default_ast_fold();
let cx: ext_ctxt = mk_ctxt(parse_sess, copy cfg);
let cx: @ext_ctxt = mk_ctxt(parse_sess, copy cfg);
let f_pre = @AstFoldFns {
fold_expr: |expr,span,recur|
expand_expr(extsbox, cx, expr, span, recur, afp.fold_expr),
@ -27,7 +27,7 @@ use ext::build::*;

use core::unstable::extfmt::ct::*;

pub fn expand_syntax_ext(cx: ext_ctxt, sp: span, tts: &[ast::token_tree])
pub fn expand_syntax_ext(cx: @ext_ctxt, sp: span, tts: &[ast::token_tree])
-> base::MacResult {
let args = get_exprs_from_tts(cx, tts);
if args.len() == 0 {
@ -38,7 +38,7 @@ pub fn expand_syntax_ext(cx: ext_ctxt, sp: span, tts: &[ast::token_tree])
~"first argument to fmt! must be a string literal.");
let fmtspan = args[0].span;
debug!("Format string: %s", fmt);
fn parse_fmt_err_(cx: ext_ctxt, sp: span, msg: &str) -> ! {
fn parse_fmt_err_(cx: @ext_ctxt, sp: span, msg: &str) -> ! {
cx.span_fatal(sp, msg);
}
let parse_fmt_err: @fn(&str) -> ! = |s| parse_fmt_err_(cx, fmtspan, s);
@ -50,23 +50,23 @@ pub fn expand_syntax_ext(cx: ext_ctxt, sp: span, tts: &[ast::token_tree])
// probably be factored out in common with other code that builds
// expressions. Also: Cleanup the naming of these functions.
// Note: Moved many of the common ones to build.rs --kevina
fn pieces_to_expr(cx: ext_ctxt, sp: span,
fn pieces_to_expr(cx: @ext_ctxt, sp: span,
pieces: ~[Piece], args: ~[@ast::expr])
-> @ast::expr {
fn make_path_vec(cx: ext_ctxt, ident: @~str) -> ~[ast::ident] {
fn make_path_vec(cx: @ext_ctxt, ident: @~str) -> ~[ast::ident] {
let intr = cx.parse_sess().interner;
return ~[intr.intern(@~"unstable"), intr.intern(@~"extfmt"),
intr.intern(@~"rt"), intr.intern(ident)];
}
fn make_rt_path_expr(cx: ext_ctxt, sp: span, nm: @~str) -> @ast::expr {
fn make_rt_path_expr(cx: @ext_ctxt, sp: span, nm: @~str) -> @ast::expr {
let path = make_path_vec(cx, nm);
return mk_path_global(cx, sp, path);
}
// Produces an AST expression that represents a RT::conv record,
// which tells the RT::conv* functions how to perform the conversion

fn make_rt_conv_expr(cx: ext_ctxt, sp: span, cnv: Conv) -> @ast::expr {
fn make_rt_conv_expr(cx: @ext_ctxt, sp: span, cnv: Conv) -> @ast::expr {
fn make_flags(cx: ext_ctxt, sp: span, flags: ~[Flag]) -> @ast::expr {
fn make_flags(cx: @ext_ctxt, sp: span, flags: ~[Flag]) -> @ast::expr {
let mut tmp_expr = make_rt_path_expr(cx, sp, @~"flag_none");
for flags.each |f| {
let fstr = match *f {
@ -81,7 +81,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
}
return tmp_expr;
}
fn make_count(cx: ext_ctxt, sp: span, cnt: Count) -> @ast::expr {
fn make_count(cx: @ext_ctxt, sp: span, cnt: Count) -> @ast::expr {
match cnt {
CountImplied => {
return make_rt_path_expr(cx, sp, @~"CountImplied");
@ -95,7 +95,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
_ => cx.span_unimpl(sp, ~"unimplemented fmt! conversion")
}
}
fn make_ty(cx: ext_ctxt, sp: span, t: Ty) -> @ast::expr {
fn make_ty(cx: @ext_ctxt, sp: span, t: Ty) -> @ast::expr {
let mut rt_type;
match t {
TyHex(c) => match c {
@ -108,7 +108,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
}
return make_rt_path_expr(cx, sp, @rt_type);
}
fn make_conv_struct(cx: ext_ctxt, sp: span, flags_expr: @ast::expr,
fn make_conv_struct(cx: @ext_ctxt, sp: span, flags_expr: @ast::expr,
width_expr: @ast::expr, precision_expr: @ast::expr,
ty_expr: @ast::expr) -> @ast::expr {
let intr = cx.parse_sess().interner;
@ -139,7 +139,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
make_conv_struct(cx, sp, rt_conv_flags, rt_conv_width,
rt_conv_precision, rt_conv_ty)
}
fn make_conv_call(cx: ext_ctxt, sp: span, conv_type: ~str, cnv: Conv,
fn make_conv_call(cx: @ext_ctxt, sp: span, conv_type: ~str, cnv: Conv,
arg: @ast::expr) -> @ast::expr {
let fname = ~"conv_" + conv_type;
let path = make_path_vec(cx, @fname);
@ -148,7 +148,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
return mk_call_global(cx, arg.span, path, args);
}

fn make_new_conv(cx: ext_ctxt, sp: span, cnv: Conv, arg: @ast::expr) ->
fn make_new_conv(cx: @ext_ctxt, sp: span, cnv: Conv, arg: @ast::expr) ->
@ast::expr {
// FIXME: Move validation code into core::extfmt (Issue #2249)
@ -17,7 +17,7 @@ use ext::base::*;
use ext::base;
use print;

pub fn expand_syntax_ext(cx: ext_ctxt,
pub fn expand_syntax_ext(cx: @ext_ctxt,
sp: codemap::span,
tt: &[ast::token_tree])
-> base::MacResult {

@ -138,7 +138,7 @@ pub trait ext_ctxt_ast_builder {
fn strip_bounds(&self, bounds: &Generics) -> Generics;
}

impl ext_ctxt_ast_builder for ext_ctxt {
impl ext_ctxt_ast_builder for @ext_ctxt {
fn ty_option(&self, ty: @ast::Ty) -> @ast::Ty {
self.ty_path_ast_builder(path_global(~[
self.ident_of(~"core"),

@ -37,9 +37,8 @@ use ext::base::ext_ctxt;
use ext::pipes::proto::{state, protocol, next_state};
use ext::pipes::proto;

impl proto::visitor<(), (), ()> for ext_ctxt {
impl proto::visitor<(), (), ()> for @ext_ctxt {
fn visit_proto(&self, _proto: protocol,
_states: &[()]) { }
fn visit_proto(&self, _proto: protocol, _states: &[()]) { }

fn visit_state(&self, state: state, _m: &[()]) {
if state.messages.len() == 0 {

@ -45,7 +45,7 @@ use ext::pipes::proto::protocol;
use core::str;
use std::bitv::Bitv;

pub fn analyze(proto: protocol, _cx: ext_ctxt) {
pub fn analyze(proto: protocol, _cx: @ext_ctxt) {
debug!("initializing colive analysis");
let num_states = proto.num_states();
let mut colive = do (copy proto.states).map_to_vec |state| {

@ -63,13 +63,15 @@ pub mod check;
pub mod liveness;

pub fn expand_proto(cx: ext_ctxt, _sp: span, id: ast::ident,
pub fn expand_proto(cx: @ext_ctxt, _sp: span, id: ast::ident,
tt: ~[ast::token_tree]) -> base::MacResult {
let sess = cx.parse_sess();
let cfg = cx.cfg();
let tt_rdr = new_tt_reader(copy cx.parse_sess().span_diagnostic,
cx.parse_sess().interner, None, copy tt);
cx.parse_sess().interner,
None,
copy tt);
let rdr = tt_rdr as reader;
let rdr = tt_rdr as @reader;
let rust_parser = Parser(sess, cfg, rdr.dup());

let mut proto = rust_parser.parse_proto(cx.str_of(id));

@ -26,27 +26,27 @@ use core::to_str::ToStr;
use core::vec;

pub trait gen_send {
fn gen_send(&mut self, cx: ext_ctxt, try: bool) -> @ast::item;
fn gen_send(&mut self, cx: @ext_ctxt, try: bool) -> @ast::item;
fn to_ty(&mut self, cx: ext_ctxt) -> @ast::Ty;
fn to_ty(&mut self, cx: @ext_ctxt) -> @ast::Ty;
}

pub trait to_type_decls {
fn to_type_decls(&self, cx: ext_ctxt) -> ~[@ast::item];
fn to_type_decls(&self, cx: @ext_ctxt) -> ~[@ast::item];
fn to_endpoint_decls(&self, cx: ext_ctxt,
fn to_endpoint_decls(&self, cx: @ext_ctxt,
dir: direction) -> ~[@ast::item];
}

pub trait gen_init {
fn gen_init(&self, cx: ext_ctxt) -> @ast::item;
fn gen_init(&self, cx: @ext_ctxt) -> @ast::item;
fn compile(&self, cx: ext_ctxt) -> @ast::item;
fn compile(&self, cx: @ext_ctxt) -> @ast::item;
fn buffer_ty_path(&self, cx: ext_ctxt) -> @ast::Ty;
fn buffer_ty_path(&self, cx: @ext_ctxt) -> @ast::Ty;
fn gen_buffer_type(&self, cx: ext_ctxt) -> @ast::item;
fn gen_buffer_type(&self, cx: @ext_ctxt) -> @ast::item;
fn gen_buffer_init(&self, ext_cx: ext_ctxt) -> @ast::expr;
fn gen_buffer_init(&self, ext_cx: @ext_ctxt) -> @ast::expr;
fn gen_init_bounded(&self, ext_cx: ext_ctxt) -> @ast::expr;
fn gen_init_bounded(&self, ext_cx: @ext_ctxt) -> @ast::expr;
}

impl gen_send for message {
fn gen_send(&mut self, cx: ext_ctxt, try: bool) -> @ast::item {
fn gen_send(&mut self, cx: @ext_ctxt, try: bool) -> @ast::item {
debug!("pipec: gen_send");
let name = self.name();

@ -188,14 +188,14 @@ impl gen_send for message {
}
}

fn to_ty(&mut self, cx: ext_ctxt) -> @ast::Ty {
fn to_ty(&mut self, cx: @ext_ctxt) -> @ast::Ty {
cx.ty_path_ast_builder(path(~[cx.ident_of(self.name())], self.span())
.add_tys(cx.ty_vars_global(&self.get_generics().ty_params)))
}
}

impl to_type_decls for state {
fn to_type_decls(&self, cx: ext_ctxt) -> ~[@ast::item] {
fn to_type_decls(&self, cx: @ext_ctxt) -> ~[@ast::item] {
debug!("pipec: to_type_decls");
// This compiles into two different type declarations. Say the
// state is called ping. This will generate both `ping` and
@ -244,7 +244,7 @@ impl to_type_decls for state {
]
}

fn to_endpoint_decls(&self, cx: ext_ctxt,
fn to_endpoint_decls(&self, cx: @ext_ctxt,
dir: direction) -> ~[@ast::item] {
debug!("pipec: to_endpoint_decls");
let dir = match dir {
@ -306,7 +306,7 @@ impl to_type_decls for state {
}

impl gen_init for protocol {
fn gen_init(&self, cx: ext_ctxt) -> @ast::item {
fn gen_init(&self, cx: @ext_ctxt) -> @ast::item {
let ext_cx = cx;

debug!("gen_init");
@ -344,7 +344,7 @@ impl gen_init for protocol {
body.to_source(cx)))
}

fn gen_buffer_init(&self, ext_cx: ext_ctxt) -> @ast::expr {
fn gen_buffer_init(&self, ext_cx: @ext_ctxt) -> @ast::expr {
ext_cx.struct_expr(path(~[ext_cx.ident_of(~"__Buffer")],
dummy_sp()),
self.states.map_to_vec(|s| {
@ -356,7 +356,7 @@ impl gen_init for protocol {
}))
}

fn gen_init_bounded(&self, ext_cx: ext_ctxt) -> @ast::expr {
fn gen_init_bounded(&self, ext_cx: @ext_ctxt) -> @ast::expr {
debug!("gen_init_bounded");
let buffer_fields = self.gen_buffer_init(ext_cx);
let buffer = quote_expr!(~::core::pipes::Buffer {
@ -382,7 +382,7 @@ impl gen_init for protocol {
})
}

fn buffer_ty_path(&self, cx: ext_ctxt) -> @ast::Ty {
fn buffer_ty_path(&self, cx: @ext_ctxt) -> @ast::Ty {
let mut params: OptVec<ast::TyParam> = opt_vec::Empty;
for (copy self.states).each |s| {
for s.generics.ty_params.each |tp| {
@ -399,7 +399,7 @@ impl gen_init for protocol {
.add_tys(cx.ty_vars_global(&params)))
}

fn gen_buffer_type(&self, cx: ext_ctxt) -> @ast::item {
fn gen_buffer_type(&self, cx: @ext_ctxt) -> @ast::item {
let ext_cx = cx;
let mut params: OptVec<ast::TyParam> = opt_vec::Empty;
let fields = do (copy self.states).map_to_vec |s| {
@ -442,7 +442,7 @@ impl gen_init for protocol {
cx.strip_bounds(&generics))
}

fn compile(&self, cx: ext_ctxt) -> @ast::item {
fn compile(&self, cx: @ext_ctxt) -> @ast::item {
let mut items = ~[self.gen_init(cx)];
let mut client_states = ~[];
let mut server_states = ~[];
@ -96,7 +96,7 @@ pub impl state_ {
}

/// Returns the type that is used for the messages.
fn to_ty(&self, cx: ext_ctxt) -> @ast::Ty {
fn to_ty(&self, cx: @ext_ctxt) -> @ast::Ty {
cx.ty_path_ast_builder
(path(~[cx.ident_of(self.name)],self.span).add_tys(
cx.ty_vars(&self.generics.ty_params)))

@ -49,11 +49,11 @@ pub mod rt {
use print::pprust::{item_to_str, ty_to_str};

pub trait ToTokens {
pub fn to_tokens(&self, _cx: ext_ctxt) -> ~[token_tree];
pub fn to_tokens(&self, _cx: @ext_ctxt) -> ~[token_tree];
}

impl ToTokens for ~[token_tree] {
pub fn to_tokens(&self, _cx: ext_ctxt) -> ~[token_tree] {
pub fn to_tokens(&self, _cx: @ext_ctxt) -> ~[token_tree] {
copy *self
}
}
@ -62,10 +62,10 @@ pub mod rt {

trait ToSource : ToTokens {
// Takes a thing and generates a string containing rust code for it.
pub fn to_source(cx: ext_ctxt) -> ~str;
pub fn to_source(cx: @ext_ctxt) -> ~str;

// If you can make source, you can definitely make tokens.
pub fn to_tokens(cx: ext_ctxt) -> ~[token_tree] {
pub fn to_tokens(cx: @ext_ctxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
}
}
@ -74,47 +74,47 @@ pub mod rt {

pub trait ToSource {
// Takes a thing and generates a string containing rust code for it.
pub fn to_source(&self, cx: ext_ctxt) -> ~str;
pub fn to_source(&self, cx: @ext_ctxt) -> ~str;
}

impl ToSource for ast::ident {
fn to_source(&self, cx: ext_ctxt) -> ~str {
fn to_source(&self, cx: @ext_ctxt) -> ~str {
copy *cx.parse_sess().interner.get(*self)
}
}

impl ToSource for @ast::item {
fn to_source(&self, cx: ext_ctxt) -> ~str {
fn to_source(&self, cx: @ext_ctxt) -> ~str {
item_to_str(*self, cx.parse_sess().interner)
}
}

impl ToSource for ~[@ast::item] {
fn to_source(&self, cx: ext_ctxt) -> ~str {
fn to_source(&self, cx: @ext_ctxt) -> ~str {
str::connect(self.map(|i| i.to_source(cx)), ~"\n\n")
}
}

impl ToSource for @ast::Ty {
fn to_source(&self, cx: ext_ctxt) -> ~str {
fn to_source(&self, cx: @ext_ctxt) -> ~str {
ty_to_str(*self, cx.parse_sess().interner)
}
}

impl ToSource for ~[@ast::Ty] {
fn to_source(&self, cx: ext_ctxt) -> ~str {
fn to_source(&self, cx: @ext_ctxt) -> ~str {
str::connect(self.map(|i| i.to_source(cx)), ~", ")
}
}

impl ToSource for Generics {
fn to_source(&self, cx: ext_ctxt) -> ~str {
fn to_source(&self, cx: @ext_ctxt) -> ~str {
pprust::generics_to_str(self, cx.parse_sess().interner)
}
}

impl ToSource for @ast::expr {
fn to_source(&self, cx: ext_ctxt) -> ~str {
fn to_source(&self, cx: @ext_ctxt) -> ~str {
pprust::expr_to_str(*self, cx.parse_sess().interner)
}
}
@ -122,43 +122,43 @@ pub mod rt {
// Alas ... we write these out instead. All redundant.

impl ToTokens for ast::ident {
fn to_tokens(&self, cx: ext_ctxt) -> ~[token_tree] {
fn to_tokens(&self, cx: @ext_ctxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
}
}

impl ToTokens for @ast::item {
fn to_tokens(&self, cx: ext_ctxt) -> ~[token_tree] {
fn to_tokens(&self, cx: @ext_ctxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
}
}

impl ToTokens for ~[@ast::item] {
fn to_tokens(&self, cx: ext_ctxt) -> ~[token_tree] {
fn to_tokens(&self, cx: @ext_ctxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
}
}

impl ToTokens for @ast::Ty {
fn to_tokens(&self, cx: ext_ctxt) -> ~[token_tree] {
fn to_tokens(&self, cx: @ext_ctxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
}
}

impl ToTokens for ~[@ast::Ty] {
fn to_tokens(&self, cx: ext_ctxt) -> ~[token_tree] {
fn to_tokens(&self, cx: @ext_ctxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
}
}

impl ToTokens for Generics {
fn to_tokens(&self, cx: ext_ctxt) -> ~[token_tree] {
fn to_tokens(&self, cx: @ext_ctxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
}
}

impl ToTokens for @ast::expr {
fn to_tokens(&self, cx: ext_ctxt) -> ~[token_tree] {
fn to_tokens(&self, cx: @ext_ctxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
}
}
@ -170,7 +170,7 @@ pub mod rt {
fn parse_tts(&self, s: ~str) -> ~[ast::token_tree];
}

impl ExtParseUtils for ext_ctxt {
impl ExtParseUtils for @ext_ctxt {

fn parse_item(&self, s: ~str) -> @ast::item {
let res = parse::parse_item_from_source_str(
@ -216,19 +216,19 @@ pub mod rt {
}

pub fn expand_quote_tokens(cx: ext_ctxt,
pub fn expand_quote_tokens(cx: @ext_ctxt,
sp: span,
tts: &[ast::token_tree]) -> base::MacResult {
base::MRExpr(expand_tts(cx, sp, tts))
}

pub fn expand_quote_expr(cx: ext_ctxt,
pub fn expand_quote_expr(cx: @ext_ctxt,
sp: span,
tts: &[ast::token_tree]) -> base::MacResult {
base::MRExpr(expand_parse_call(cx, sp, ~"parse_expr", ~[], tts))
}

pub fn expand_quote_item(cx: ext_ctxt,
pub fn expand_quote_item(cx: @ext_ctxt,
sp: span,
tts: &[ast::token_tree]) -> base::MacResult {
let e_attrs = build::mk_uniq_vec_e(cx, sp, ~[]);
@ -236,7 +236,7 @@ pub fn expand_quote_item(cx: ext_ctxt,
~[e_attrs], tts))
}

pub fn expand_quote_pat(cx: ext_ctxt,
pub fn expand_quote_pat(cx: @ext_ctxt,
sp: span,
tts: &[ast::token_tree]) -> base::MacResult {
let e_refutable = build::mk_lit(cx, sp, ast::lit_bool(true));
@ -244,7 +244,7 @@ pub fn expand_quote_pat(cx: ext_ctxt,
~[e_refutable], tts))
}

pub fn expand_quote_ty(cx: ext_ctxt,
pub fn expand_quote_ty(cx: @ext_ctxt,
sp: span,
tts: &[ast::token_tree]) -> base::MacResult {
let e_param_colons = build::mk_lit(cx, sp, ast::lit_bool(false));
@ -252,7 +252,7 @@ pub fn expand_quote_ty(cx: ext_ctxt,
~[e_param_colons], tts))
}

pub fn expand_quote_stmt(cx: ext_ctxt,
pub fn expand_quote_stmt(cx: @ext_ctxt,
sp: span,
tts: &[ast::token_tree]) -> base::MacResult {
let e_attrs = build::mk_uniq_vec_e(cx, sp, ~[]);
@ -260,16 +260,16 @@ pub fn expand_quote_stmt(cx: ext_ctxt,
~[e_attrs], tts))
}

fn ids_ext(cx: ext_ctxt, strs: ~[~str]) -> ~[ast::ident] {
fn ids_ext(cx: @ext_ctxt, strs: ~[~str]) -> ~[ast::ident] {
strs.map(|str| cx.parse_sess().interner.intern(@copy *str))
}

fn id_ext(cx: ext_ctxt, +str: ~str) -> ast::ident {
fn id_ext(cx: @ext_ctxt, +str: ~str) -> ast::ident {
cx.parse_sess().interner.intern(@str)
}

// Lift an ident to the expr that evaluates to that ident.
fn mk_ident(cx: ext_ctxt, sp: span, ident: ast::ident) -> @ast::expr {
fn mk_ident(cx: @ext_ctxt, sp: span, ident: ast::ident) -> @ast::expr {
let e_meth = build::mk_access(cx, sp,
ids_ext(cx, ~[~"ext_cx"]),
id_ext(cx, ~"ident_of"));
@ -277,13 +277,13 @@ fn mk_ident(cx: ext_ctxt, sp: span, ident: ast::ident) -> @ast::expr {
build::mk_call_(cx, sp, e_meth, ~[e_str])
}

fn mk_bytepos(cx: ext_ctxt, sp: span, bpos: BytePos) -> @ast::expr {
fn mk_bytepos(cx: @ext_ctxt, sp: span, bpos: BytePos) -> @ast::expr {
let path = ids_ext(cx, ~[~"BytePos"]);
let arg = build::mk_uint(cx, sp, bpos.to_uint());
build::mk_call(cx, sp, path, ~[arg])
}

fn mk_binop(cx: ext_ctxt, sp: span, bop: token::binop) -> @ast::expr {
fn mk_binop(cx: @ext_ctxt, sp: span, bop: token::binop) -> @ast::expr {
let name = match bop {
PLUS => "PLUS",
MINUS => "MINUS",
@ -300,7 +300,7 @@ fn mk_binop(cx: ext_ctxt, sp: span, bop: token::binop) -> @ast::expr {
ids_ext(cx, ~[name.to_owned()]))
}

fn mk_token(cx: ext_ctxt, sp: span, tok: token::Token) -> @ast::expr {
fn mk_token(cx: @ext_ctxt, sp: span, tok: token::Token) -> @ast::expr {

match tok {
BINOP(binop) => {
@ -443,7 +443,7 @@ fn mk_token(cx: ext_ctxt, sp: span, tok: token::Token) -> @ast::expr {
}

fn mk_tt(cx: ext_ctxt, sp: span, tt: &ast::token_tree)
fn mk_tt(cx: @ext_ctxt, sp: span, tt: &ast::token_tree)
-> ~[@ast::stmt] {

match *tt {
@ -494,7 +494,7 @@ fn mk_tt(cx: ext_ctxt, sp: span, tt: &ast::token_tree)
}
}

fn mk_tts(cx: ext_ctxt, sp: span, tts: &[ast::token_tree])
fn mk_tts(cx: @ext_ctxt, sp: span, tts: &[ast::token_tree])
-> ~[@ast::stmt] {
let mut ss = ~[];
for tts.each |tt| {
@ -503,7 +503,7 @@ fn mk_tts(cx: ext_ctxt, sp: span, tts: &[ast::token_tree])
ss
}

fn expand_tts(cx: ext_ctxt,
fn expand_tts(cx: @ext_ctxt,
sp: span,
tts: &[ast::token_tree]) -> @ast::expr {

@ -577,7 +577,7 @@ fn expand_tts(cx: ext_ctxt,
ids_ext(cx, ~[~"tt"]))))
}

fn expand_parse_call(cx: ext_ctxt,
fn expand_parse_call(cx: @ext_ctxt,
sp: span,
+parse_method: ~str,
+arg_exprs: ~[@ast::expr],
@ -29,7 +29,7 @@ use core::vec;
// a given file into the current one.

/* line!(): expands to the current line number */
pub fn expand_line(cx: ext_ctxt, sp: span, tts: &[ast::token_tree])
pub fn expand_line(cx: @ext_ctxt, sp: span, tts: &[ast::token_tree])
-> base::MacResult {
base::check_zero_tts(cx, sp, tts, "line!");

@ -40,7 +40,7 @@ pub fn expand_line(cx: ext_ctxt, sp: span, tts: &[ast::token_tree])
}

/* col!(): expands to the current column number */
pub fn expand_col(cx: ext_ctxt, sp: span, tts: &[ast::token_tree])
pub fn expand_col(cx: @ext_ctxt, sp: span, tts: &[ast::token_tree])
-> base::MacResult {
base::check_zero_tts(cx, sp, tts, "col!");

@ -52,7 +52,7 @@ pub fn expand_col(cx: ext_ctxt, sp: span, tts: &[ast::token_tree])
/* file!(): expands to the current filename */
/* The filemap (`loc.file`) contains a bunch more information we could spit
 * out if we wanted. */
pub fn expand_file(cx: ext_ctxt, sp: span, tts: &[ast::token_tree])
pub fn expand_file(cx: @ext_ctxt, sp: span, tts: &[ast::token_tree])
-> base::MacResult {
base::check_zero_tts(cx, sp, tts, "file!");

@ -62,13 +62,13 @@ pub fn expand_file(cx: ext_ctxt, sp: span, tts: &[ast::token_tree])
base::MRExpr(mk_base_str(cx, topmost.call_site, filename))
}

pub fn expand_stringify(cx: ext_ctxt, sp: span, tts: &[ast::token_tree])
pub fn expand_stringify(cx: @ext_ctxt, sp: span, tts: &[ast::token_tree])
-> base::MacResult {
let s = pprust::tts_to_str(tts, cx.parse_sess().interner);
base::MRExpr(mk_base_str(cx, sp, s))
}

pub fn expand_mod(cx: ext_ctxt, sp: span, tts: &[ast::token_tree])
pub fn expand_mod(cx: @ext_ctxt, sp: span, tts: &[ast::token_tree])
-> base::MacResult {
base::check_zero_tts(cx, sp, tts, "module_path!");
base::MRExpr(mk_base_str(cx, sp,
@ -79,7 +79,7 @@ pub fn expand_mod(cx: ext_ctxt, sp: span, tts: &[ast::token_tree])
// include! : parse the given file as an expr
// This is generally a bad idea because it's going to behave
// unhygienically.
pub fn expand_include(cx: ext_ctxt, sp: span, tts: &[ast::token_tree])
pub fn expand_include(cx: @ext_ctxt, sp: span, tts: &[ast::token_tree])
-> base::MacResult {
let file = get_single_str_from_tts(cx, sp, tts, "include!");
let p = parse::new_sub_parser_from_file(
@ -89,7 +89,7 @@ pub fn expand_include(cx: ext_ctxt, sp: span, tts: &[ast::token_tree])
}

// include_str! : read the given file, insert it as a literal string expr
pub fn expand_include_str(cx: ext_ctxt, sp: span, tts: &[ast::token_tree])
pub fn expand_include_str(cx: @ext_ctxt, sp: span, tts: &[ast::token_tree])
-> base::MacResult {
let file = get_single_str_from_tts(cx, sp, tts, "include_str!");
let res = io::read_whole_file_str(&res_rel_file(cx, sp, &Path(file)));
@ -103,7 +103,7 @@ pub fn expand_include_str(cx: ext_ctxt, sp: span, tts: &[ast::token_tree])
base::MRExpr(mk_base_str(cx, sp, result::unwrap(res)))
}

pub fn expand_include_bin(cx: ext_ctxt, sp: span, tts: &[ast::token_tree])
pub fn expand_include_bin(cx: @ext_ctxt, sp: span, tts: &[ast::token_tree])
-> base::MacResult {
let file = get_single_str_from_tts(cx, sp, tts, "include_bin!");
match io::read_whole_file(&res_rel_file(cx, sp, &Path(file))) {
@ -147,7 +147,7 @@ fn topmost_expn_info(expn_info: @codemap::ExpnInfo) -> @codemap::ExpnInfo {

// resolve a file-system path to an absolute file-system path (if it
// isn't already)
fn res_rel_file(cx: ext_ctxt, sp: codemap::span, arg: &Path) -> Path {
fn res_rel_file(cx: @ext_ctxt, sp: codemap::span, arg: &Path) -> Path {
// NB: relative paths are resolved relative to the compilation unit
if !arg.is_absolute {
let cu = Path(cx.codemap().span_to_filename(sp));
@ -17,8 +17,10 @@ use ext::base;
|
||||||
use parse::lexer::{new_tt_reader, reader};
|
use parse::lexer::{new_tt_reader, reader};
|
||||||
use parse::parser::Parser;
|
use parse::parser::Parser;
|
||||||
|
|
||||||
pub fn expand_trace_macros(cx: ext_ctxt, sp: span,
|
pub fn expand_trace_macros(cx: @ext_ctxt,
|
||||||
tt: &[ast::token_tree]) -> base::MacResult {
|
sp: span,
|
||||||
|
tt: &[ast::token_tree])
|
||||||
|
-> base::MacResult {
|
||||||
let sess = cx.parse_sess();
|
let sess = cx.parse_sess();
|
||||||
let cfg = cx.cfg();
|
let cfg = cx.cfg();
|
||||||
let tt_rdr = new_tt_reader(
|
let tt_rdr = new_tt_reader(
|
||||||
|
@ -27,7 +29,7 @@ pub fn expand_trace_macros(cx: ext_ctxt, sp: span,
|
||||||
None,
|
None,
|
||||||
vec::from_slice(tt)
|
vec::from_slice(tt)
|
||||||
);
|
);
|
||||||
let rdr = tt_rdr as reader;
|
let rdr = tt_rdr as @reader;
|
||||||
let rust_parser = Parser(
|
let rust_parser = Parser(
|
||||||
sess,
|
sess,
|
||||||
copy cfg,
|
copy cfg,
|
||||||
|
|
|
@ -224,7 +224,7 @@ pub enum parse_result {
|
||||||
pub fn parse_or_else(
|
pub fn parse_or_else(
|
||||||
sess: @mut ParseSess,
|
sess: @mut ParseSess,
|
||||||
+cfg: ast::crate_cfg,
|
+cfg: ast::crate_cfg,
|
||||||
rdr: reader,
|
rdr: @reader,
|
||||||
ms: ~[matcher]
|
ms: ~[matcher]
|
||||||
) -> HashMap<ident, @named_match> {
|
) -> HashMap<ident, @named_match> {
|
||||||
match parse(sess, cfg, rdr, ms) {
|
match parse(sess, cfg, rdr, ms) {
|
||||||
|
@ -237,7 +237,7 @@ pub fn parse_or_else(
|
||||||
pub fn parse(
|
pub fn parse(
|
||||||
sess: @mut ParseSess,
|
sess: @mut ParseSess,
|
||||||
cfg: ast::crate_cfg,
|
cfg: ast::crate_cfg,
|
||||||
rdr: reader,
|
rdr: @reader,
|
||||||
ms: ~[matcher]
|
ms: ~[matcher]
|
||||||
) -> parse_result {
|
) -> parse_result {
|
||||||
let mut cur_eis = ~[];
|
let mut cur_eis = ~[];
|
||||||
|
|
|
@@ -27,8 +27,11 @@ use print;
 use core::io;

-pub fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
-                         arg: ~[ast::token_tree]) -> base::MacResult {
+pub fn add_new_extension(cx: @ext_ctxt,
+                         sp: span,
+                         name: ident,
+                         arg: ~[ast::token_tree])
+                      -> base::MacResult {
     // these spans won't matter, anyways
     fn ms(m: matcher_) -> matcher {
         spanned { node: copy m, span: dummy_sp() }
@@ -54,8 +57,10 @@ pub fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
     // Parse the macro_rules! invocation (`none` is for no interpolations):
     let arg_reader = new_tt_reader(copy cx.parse_sess().span_diagnostic,
                                    cx.parse_sess().interner, None, copy arg);
-    let argument_map = parse_or_else(cx.parse_sess(), cx.cfg(),
-                                     arg_reader as reader, argument_gram);
+    let argument_map = parse_or_else(cx.parse_sess(),
+                                     cx.cfg(),
+                                     arg_reader as @reader,
+                                     argument_gram);

     // Extract the arguments:
     let lhses = match argument_map.get(&lhs_nm) {
@@ -69,7 +74,7 @@ pub fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
     };

     // Given `lhses` and `rhses`, this is the new macro we create
-    fn generic_extension(cx: ext_ctxt, sp: span, name: ident,
+    fn generic_extension(cx: @ext_ctxt, sp: span, name: ident,
                          arg: &[ast::token_tree],
                          lhses: ~[@named_match], rhses: ~[@named_match])
                       -> MacResult {
@@ -98,7 +103,7 @@ pub fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
             itr,
             None,
             vec::from_slice(arg)
-        ) as reader;
+        ) as @reader;
         match parse(cx.parse_sess(), cx.cfg(), arg_rdr, (*mtcs)) {
             success(named_matches) => {
                 let rhs = match rhses[i] {
@@ -118,8 +123,9 @@ pub fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
                 // rhs has holes ( `$id` and `$(...)` that need filled)
                 let trncbr = new_tt_reader(s_d, itr, Some(named_matches),
                                            rhs);
-                let p = @Parser(cx.parse_sess(), cx.cfg(),
-                                trncbr as reader);
+                let p = @Parser(cx.parse_sess(),
+                                cx.cfg(),
+                                trncbr as @reader);

                 // Let the context choose how to interpret the result.
                 // Weird, but useful for X-macros.
@@ -140,7 +146,7 @@ pub fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
         cx.span_fatal(best_fail_spot, best_fail_msg);
     }

-    let exp: @fn(ext_ctxt, span, &[ast::token_tree]) -> MacResult =
+    let exp: @fn(@ext_ctxt, span, &[ast::token_tree]) -> MacResult =
         |cx, sp, arg| generic_extension(cx, sp, name, arg, lhses, rhses);

     return MRDef(MacroDef{
@@ -34,7 +34,7 @@ struct TtFrame {
 }

 pub struct TtReader {
-    sp_diag: span_handler,
+    sp_diag: @span_handler,
     interner: @ident_interner,
     // the unzipped tree:
     cur: @mut TtFrame,
@@ -50,7 +50,7 @@ pub struct TtReader {
 /** This can do Macro-By-Example transcription. On the other hand, if
  * `src` contains no `tt_seq`s and `tt_nonterminal`s, `interp` can (and
  * should) be none. */
-pub fn new_tt_reader(sp_diag: span_handler,
+pub fn new_tt_reader(sp_diag: @span_handler,
                      itr: @ident_interner,
                      interp: Option<std::oldmap::HashMap<ident,@named_match>>,
                      +src: ~[ast::token_tree])
@@ -48,26 +48,26 @@ pub trait ast_fold {

 pub struct AstFoldFns {
     //unlike the others, item_ is non-trivial
-    fold_crate: @fn(&crate_, span, ast_fold) -> (crate_, span),
-    fold_view_item: @fn(view_item_, ast_fold) -> view_item_,
-    fold_foreign_item: @fn(@foreign_item, ast_fold) -> @foreign_item,
-    fold_item: @fn(@item, ast_fold) -> Option<@item>,
-    fold_struct_field: @fn(@struct_field, ast_fold) -> @struct_field,
-    fold_item_underscore: @fn(&item_, ast_fold) -> item_,
-    fold_method: @fn(@method, ast_fold) -> @method,
-    fold_block: @fn(&blk_, span, ast_fold) -> (blk_, span),
-    fold_stmt: @fn(&stmt_, span, ast_fold) -> (stmt_, span),
-    fold_arm: @fn(&arm, ast_fold) -> arm,
-    fold_pat: @fn(&pat_, span, ast_fold) -> (pat_, span),
-    fold_decl: @fn(&decl_, span, ast_fold) -> (decl_, span),
-    fold_expr: @fn(&expr_, span, ast_fold) -> (expr_, span),
-    fold_ty: @fn(&ty_, span, ast_fold) -> (ty_, span),
-    fold_mod: @fn(&_mod, ast_fold) -> _mod,
-    fold_foreign_mod: @fn(&foreign_mod, ast_fold) -> foreign_mod,
-    fold_variant: @fn(&variant_, span, ast_fold) -> (variant_, span),
-    fold_ident: @fn(ident, ast_fold) -> ident,
-    fold_path: @fn(@path, ast_fold) -> path,
-    fold_local: @fn(&local_, span, ast_fold) -> (local_, span),
+    fold_crate: @fn(&crate_, span, @ast_fold) -> (crate_, span),
+    fold_view_item: @fn(view_item_, @ast_fold) -> view_item_,
+    fold_foreign_item: @fn(@foreign_item, @ast_fold) -> @foreign_item,
+    fold_item: @fn(@item, @ast_fold) -> Option<@item>,
+    fold_struct_field: @fn(@struct_field, @ast_fold) -> @struct_field,
+    fold_item_underscore: @fn(&item_, @ast_fold) -> item_,
+    fold_method: @fn(@method, @ast_fold) -> @method,
+    fold_block: @fn(&blk_, span, @ast_fold) -> (blk_, span),
+    fold_stmt: @fn(&stmt_, span, @ast_fold) -> (stmt_, span),
+    fold_arm: @fn(&arm, @ast_fold) -> arm,
+    fold_pat: @fn(&pat_, span, @ast_fold) -> (pat_, span),
+    fold_decl: @fn(&decl_, span, @ast_fold) -> (decl_, span),
+    fold_expr: @fn(&expr_, span, @ast_fold) -> (expr_, span),
+    fold_ty: @fn(&ty_, span, @ast_fold) -> (ty_, span),
+    fold_mod: @fn(&_mod, @ast_fold) -> _mod,
+    fold_foreign_mod: @fn(&foreign_mod, @ast_fold) -> foreign_mod,
+    fold_variant: @fn(&variant_, span, @ast_fold) -> (variant_, span),
+    fold_ident: @fn(ident, @ast_fold) -> ident,
+    fold_path: @fn(@path, @ast_fold) -> path,
+    fold_local: @fn(&local_, span, @ast_fold) -> (local_, span),
     map_exprs: @fn(@fn(@expr) -> @expr, &[@expr]) -> ~[@expr],
     new_id: @fn(node_id) -> node_id,
     new_span: @fn(span) -> span
@@ -436,8 +436,8 @@ fn noop_fold_decl(d: &decl_, fld: @ast_fold) -> decl_ {
     }
 }

-pub fn wrap<T>(f: @fn(&T, ast_fold) -> T)
-            -> @fn(&T, span, ast_fold) -> (T, span) {
+pub fn wrap<T>(f: @fn(&T, @ast_fold) -> T)
+            -> @fn(&T, span, @ast_fold) -> (T, span) {
     let result: @fn(&T, span, @ast_fold) -> (T, span) = |x, s, fld| {
         (f(x, fld), s)
     };
@@ -879,13 +879,13 @@ impl ast_fold for AstFoldFns {
     }
 }

-pub impl ast_fold {
+pub impl @ast_fold {
     fn fold_attributes(&self, attrs: ~[attribute]) -> ~[attribute] {
         attrs.map(|x| fold_attribute_(*x, *self))
     }
 }

-pub fn make_fold(afp: ast_fold_fns) -> ast_fold {
+pub fn make_fold(afp: ast_fold_fns) -> @ast_fold {
     afp as @ast_fold
 }
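In `AstFoldFns` above, every callback field is an `@fn` whose folder argument is now typed `@ast_fold` instead of the bare trait. A loose present-day counterpart is a table of function pointers whose folder parameter is a trait-object reference; the `AstFold` and `FoldFns` names below are invented for the sketch and do not correspond to the real libsyntax types.

~~~~
// Sketch: a table of fold callbacks, each receiving the folder as a trait object.
trait AstFold {
    fn new_id(&self, id: u32) -> u32;
}

struct FoldFns {
    // The folder parameter names its pointer kind: `&dyn AstFold`, not `AstFold`.
    fold_ident: fn(&str, &dyn AstFold) -> String,
    fold_id: fn(u32, &dyn AstFold) -> u32,
}

struct IdentityFold;

impl AstFold for IdentityFold {
    fn new_id(&self, id: u32) -> u32 {
        id
    }
}

fn noop_fold_ident(name: &str, _fld: &dyn AstFold) -> String {
    name.to_string()
}

fn noop_fold_id(id: u32, fld: &dyn AstFold) -> u32 {
    fld.new_id(id)
}

fn main() {
    let fns = FoldFns {
        fold_ident: noop_fold_ident,
        fold_id: noop_fold_id,
    };
    let fld = IdentityFold;
    assert_eq!((fns.fold_ident)("main", &fld), "main");
    assert_eq!((fns.fold_id)(7, &fld), 7);
}
~~~~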
@@ -256,7 +256,7 @@ fn read_block_comment(rdr: @mut StringReader,
     while level > 0 {
         debug!("=== block comment level %d", level);
         if is_eof(rdr) {
-            (rdr as reader).fatal(~"unterminated block comment");
+            (rdr as @reader).fatal(~"unterminated block comment");
         }
         if rdr.curr == '\n' {
             trim_whitespace_prefix_and_push_line(&mut lines, curr_line,
@@ -319,9 +319,11 @@ pub struct lit {
     pos: BytePos
 }

-pub fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
+pub fn gather_comments_and_literals(span_diagnostic:
+                                        @diagnostic::span_handler,
                                     +path: ~str,
-                                    srdr: io::Reader) -> (~[cmnt], ~[lit]) {
+                                    srdr: @io::Reader)
+                                 -> (~[cmnt], ~[lit]) {
     let src = @str::from_bytes(srdr.read_whole_stream());
     let itr = parse::token::mk_fake_ident_interner();
     let cm = CodeMap::new();
@@ -47,7 +47,7 @@ pub fn seq_sep_none() -> SeqSep {
     }
 }

-pub fn token_to_str(reader: reader, token: &token::Token) -> ~str {
+pub fn token_to_str(reader: @reader, token: &token::Token) -> ~str {
     token::to_str(reader.interner(), token)
 }
@@ -31,17 +31,17 @@ pub trait reader {
     fn is_eof(@mut self) -> bool;
     fn next_token(@mut self) -> TokenAndSpan;
     fn fatal(@mut self, ~str) -> !;
-    fn span_diag(@mut self) -> span_handler;
+    fn span_diag(@mut self) -> @span_handler;
     pure fn interner(@mut self) -> @token::ident_interner;
     fn peek(@mut self) -> TokenAndSpan;
-    fn dup(@mut self) -> reader;
+    fn dup(@mut self) -> @reader;
 }

 #[deriving_eq]
 pub struct TokenAndSpan {tok: token::Token, sp: span}

 pub struct StringReader {
-    span_diagnostic: span_handler,
+    span_diagnostic: @span_handler,
     src: @~str,
     // The absolute offset within the codemap of the next character to read
     pos: BytePos,
@@ -58,7 +58,7 @@ pub struct StringReader {
     peek_span: span
 }

-pub fn new_string_reader(span_diagnostic: span_handler,
+pub fn new_string_reader(span_diagnostic: @span_handler,
                          filemap: @codemap::FileMap,
                          itr: @token::ident_interner)
                       -> @mut StringReader {
@@ -68,7 +68,7 @@ pub fn new_string_reader(span_diagnostic: span_handler,
 }

 /* For comments.rs, which hackily pokes into 'pos' and 'curr' */
-pub fn new_low_level_string_reader(span_diagnostic: span_handler,
+pub fn new_low_level_string_reader(span_diagnostic: @span_handler,
                                    filemap: @codemap::FileMap,
                                    itr: @token::ident_interner)
                                 -> @mut StringReader {
@@ -121,7 +121,7 @@ impl reader for StringReader {
     fn fatal(@mut self, m: ~str) -> ! {
         self.span_diagnostic.span_fatal(copy self.peek_span, m)
     }
-    fn span_diag(@mut self) -> span_handler { self.span_diagnostic }
+    fn span_diag(@mut self) -> @span_handler { self.span_diagnostic }
     pure fn interner(@mut self) -> @token::ident_interner { self.interner }
     fn peek(@mut self) -> TokenAndSpan {
         TokenAndSpan {
@@ -129,7 +129,7 @@ impl reader for StringReader {
             sp: copy self.peek_span,
         }
     }
-    fn dup(@mut self) -> reader { dup_string_reader(self) as reader }
+    fn dup(@mut self) -> @reader { dup_string_reader(self) as @reader }
 }

 impl reader for TtReader {
@@ -138,7 +138,7 @@ impl reader for TtReader {
     fn fatal(@mut self, m: ~str) -> ! {
         self.sp_diag.span_fatal(copy self.cur_span, m);
     }
-    fn span_diag(@mut self) -> span_handler { self.sp_diag }
+    fn span_diag(@mut self) -> @span_handler { self.sp_diag }
     pure fn interner(@mut self) -> @token::ident_interner { self.interner }
     fn peek(@mut self) -> TokenAndSpan {
         TokenAndSpan {
@@ -146,7 +146,7 @@ impl reader for TtReader {
             sp: copy self.cur_span,
         }
     }
-    fn dup(@mut self) -> reader { dup_tt_reader(self) as reader }
+    fn dup(@mut self) -> @reader { dup_tt_reader(self) as @reader }
 }

 // EFFECT: advance peek_tok and peek_span to refer to the next token.
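The `reader` trait is typical of the API-level impact: methods that hand back a trait object (`span_diag`, `dup`) now state which pointer carries it. A rough modern equivalent, with a hypothetical `Reader`/`StringReader` pair standing in for the lexer's types, returns `Box<dyn Reader>`:

~~~~
// Sketch only: a trait whose `dup` returns an owned trait object.
trait Reader {
    fn is_eof(&self) -> bool;
    // The return type names the pointer; a bare `Reader` here would not compile.
    fn dup(&self) -> Box<dyn Reader>;
}

#[derive(Clone)]
struct StringReader {
    src: String,
    pos: usize,
}

impl Reader for StringReader {
    fn is_eof(&self) -> bool {
        self.pos >= self.src.len()
    }
    fn dup(&self) -> Box<dyn Reader> {
        // Analogous to `dup_string_reader(self) as @reader` in the hunk above.
        Box::new(self.clone())
    }
}

fn main() {
    let r = StringReader { src: "fn main() {}".to_string(), pos: 0 };
    let copy: Box<dyn Reader> = r.dup();
    assert!(!copy.is_eof());
}
~~~~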
@@ -48,7 +48,7 @@ pub mod obsolete;
 pub struct ParseSess {
     cm: @codemap::CodeMap,
     next_id: node_id,
-    span_diagnostic: span_handler,
+    span_diagnostic: @span_handler,
     interner: @ident_interner,
 }

@@ -62,8 +62,9 @@ pub fn new_parse_sess(demitter: Option<Emitter>) -> @mut ParseSess {
     }
 }

-pub fn new_parse_sess_special_handler(sh: span_handler, cm: @codemap::CodeMap)
-                                   -> @mut ParseSess {
+pub fn new_parse_sess_special_handler(sh: @span_handler,
+                                      cm: @codemap::CodeMap)
+                                   -> @mut ParseSess {
     @mut ParseSess {
         cm: cm,
         next_id: 1,
@@ -201,20 +202,19 @@ pub fn next_node_id(sess: @mut ParseSess) -> node_id {
     return rv;
 }

-pub fn new_parser_from_source_str(
-    sess: @mut ParseSess,
-    +cfg: ast::crate_cfg,
-    +name: ~str,
-    +ss: codemap::FileSubstr,
-    source: @~str
-) -> Parser {
+pub fn new_parser_from_source_str(sess: @mut ParseSess,
+                                  +cfg: ast::crate_cfg,
+                                  +name: ~str,
+                                  +ss: codemap::FileSubstr,
+                                  source: @~str)
+                               -> Parser {
     let filemap = sess.cm.new_filemap_w_substr(name, ss, source);
     let srdr = lexer::new_string_reader(
         copy sess.span_diagnostic,
         filemap,
         sess.interner
     );
-    Parser(sess, cfg, srdr as reader)
+    Parser(sess, cfg, srdr as @reader)
 }

 /// Read the entire source file, return a parser
@@ -227,12 +227,10 @@ pub fn new_parser_result_from_file(
     match io::read_whole_file_str(path) {
         Ok(src) => {
             let filemap = sess.cm.new_filemap(path.to_str(), @src);
-            let srdr = lexer::new_string_reader(
-                copy sess.span_diagnostic,
-                filemap,
-                sess.interner
-            );
-            Ok(Parser(sess, cfg, srdr as reader))
+            let srdr = lexer::new_string_reader(copy sess.span_diagnostic,
+                                                filemap,
+                                                sess.interner);
+            Ok(Parser(sess, cfg, srdr as @reader))
         }
         Err(e) => Err(e)
@@ -281,7 +279,7 @@ pub fn new_parser_from_tts(
         None,
         tts
     );
-    Parser(sess, cfg, trdr as reader)
+    Parser(sess, cfg, trdr as @reader)
 }

 // abort if necessary
|
@ -215,8 +215,8 @@ struct ParsedItemsAndViewItems {
|
||||||
|
|
||||||
pub fn Parser(sess: @mut ParseSess,
|
pub fn Parser(sess: @mut ParseSess,
|
||||||
+cfg: ast::crate_cfg,
|
+cfg: ast::crate_cfg,
|
||||||
+rdr: reader) -> Parser {
|
+rdr: @reader)
|
||||||
|
-> Parser {
|
||||||
let tok0 = copy rdr.next_token();
|
let tok0 = copy rdr.next_token();
|
||||||
let interner = rdr.interner();
|
let interner = rdr.interner();
|
||||||
|
|
||||||
|
@ -254,7 +254,7 @@ pub struct Parser {
|
||||||
tokens_consumed: @mut uint,
|
tokens_consumed: @mut uint,
|
||||||
restriction: @mut restriction,
|
restriction: @mut restriction,
|
||||||
quote_depth: @mut uint, // not (yet) related to the quasiquoter
|
quote_depth: @mut uint, // not (yet) related to the quasiquoter
|
||||||
reader: reader,
|
reader: @reader,
|
||||||
interner: @token::ident_interner,
|
interner: @token::ident_interner,
|
||||||
keywords: HashMap<~str, ()>,
|
keywords: HashMap<~str, ()>,
|
||||||
strict_keywords: HashMap<~str, ()>,
|
strict_keywords: HashMap<~str, ()>,
|
||||||
|
|
|
@ -76,7 +76,7 @@ pub fn end(s: @ps) {
|
||||||
pp::end(s.s);
|
pp::end(s.s);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn rust_printer(writer: io::Writer, intr: @ident_interner) -> @ps {
|
pub fn rust_printer(writer: @io::Writer, intr: @ident_interner) -> @ps {
|
||||||
return @ps {
|
return @ps {
|
||||||
s: pp::mk_printer(writer, default_columns),
|
s: pp::mk_printer(writer, default_columns),
|
||||||
cm: None::<@CodeMap>,
|
cm: None::<@CodeMap>,
|
||||||
|
@ -100,10 +100,15 @@ pub const default_columns: uint = 78u;
|
||||||
// Requires you to pass an input filename and reader so that
|
// Requires you to pass an input filename and reader so that
|
||||||
// it can scan the input text for comments and literals to
|
// it can scan the input text for comments and literals to
|
||||||
// copy forward.
|
// copy forward.
|
||||||
pub fn print_crate(cm: @CodeMap, intr: @ident_interner,
|
pub fn print_crate(cm: @CodeMap,
|
||||||
span_diagnostic: diagnostic::span_handler,
|
intr: @ident_interner,
|
||||||
crate: @ast::crate, filename: ~str, in: io::Reader,
|
span_diagnostic: @diagnostic::span_handler,
|
||||||
out: io::Writer, ann: pp_ann, is_expanded: bool) {
|
crate: @ast::crate,
|
||||||
|
filename: ~str,
|
||||||
|
in: @io::Reader,
|
||||||
|
out: @io::Writer,
|
||||||
|
ann: pp_ann,
|
||||||
|
is_expanded: bool) {
|
||||||
let (cmnts, lits) = comments::gather_comments_and_literals(
|
let (cmnts, lits) = comments::gather_comments_and_literals(
|
||||||
span_diagnostic,
|
span_diagnostic,
|
||||||
copy filename,
|
copy filename,
|
||||||
|
|
|
@@ -13,7 +13,7 @@

 pub trait i<T> { }

-pub fn f<T>() -> i<T> {
+pub fn f<T>() -> @i<T> {
     impl<T> i<T> for () { }

     @() as @i<T>
@@ -58,6 +58,6 @@ fn cat(in_x : uint, in_y : int, in_name: ~str) -> cat {
 }

 fn main() {
-    let nyan : noisy = @cat(0, 2, ~"nyan") as @noisy;
+    let nyan : @noisy = @cat(0, 2, ~"nyan") as @noisy;
     nyan.eat(); //~ ERROR type `@noisy` does not implement any method in scope named `eat`
 }
@@ -14,14 +14,14 @@ impl<A:Copy> repeat<A> for @A {
     fn get() -> A { *self }
 }

-fn repeater<A:Copy>(v: @A) -> repeat<A> {
+fn repeater<A:Copy>(v: @A) -> @repeat<A> {
     // Note: owned kind is not necessary as A appears in the trait type
-    @v as repeat::<A> // No
+    @v as @repeat::<A> // No
 }

 fn main() {
     // Error results because the type of is inferred to be
-    // repeat<&blk/int> where blk is the lifetime of the block below.
+    // @repeat<&blk/int> where blk is the lifetime of the block below.

     let y = { //~ ERROR reference is not valid
         let x: &blk/int = &3;
@@ -26,22 +26,22 @@ fn to_foo<T:Copy>(t: T) {
     // the fn body itself.
     let v = &3;
     struct F<T> { f: T }
-    let x = @F {f:t} as foo;
+    let x = @F {f:t} as @foo;
     fail_unless!(x.foo(v) == 3);
 }

-fn to_foo_2<T:Copy>(t: T) -> foo {
+fn to_foo_2<T:Copy>(t: T) -> @foo {
     // Not OK---T may contain borrowed ptrs and it is going to escape
     // as part of the returned foo value
     struct F<T> { f: T }
-    @F {f:t} as foo //~ ERROR value may contain borrowed pointers; use `&static` bound
+    @F {f:t} as @foo //~ ERROR value may contain borrowed pointers; use `&static` bound
 }

-fn to_foo_3<T:Copy + &static>(t: T) -> foo {
+fn to_foo_3<T:Copy + &static>(t: T) -> @foo {
     // OK---T may escape as part of the returned foo value, but it is
     // owned and hence does not contain borrowed ptrs
     struct F<T> { f: T }
-    @F {f:t} as foo
+    @F {f:t} as @foo
 }

 fn main() {
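The test-suite hunks exercise the same point from the user's side: coercing a concrete value to a trait object now requires the pointer on both sides of the `as`, as in `@cat(0, 2, ~"nyan") as @noisy` rather than `as noisy`. A rough modern counterpart is shown below; the `Noisy`/`Cat` names are reused purely for illustration, and today the coercion is usually written with an annotated `let` binding to `Box<dyn Trait>` or `&dyn Trait`.

~~~~
// Sketch of the modern form of `let nyan: @noisy = @cat(...) as @noisy;`.
trait Noisy {
    fn speak(&self) -> String;
}

struct Cat {
    name: String,
}

impl Noisy for Cat {
    fn speak(&self) -> String {
        format!("{} says meow", self.name)
    }
}

fn main() {
    // The object type spells out its pointer: `Box<dyn Noisy>`, never bare `Noisy`.
    let nyan: Box<dyn Noisy> = Box::new(Cat { name: "nyan".to_string() });
    println!("{}", nyan.speak());

    // Borrowed form of the same coercion.
    let tom = Cat { name: "tom".to_string() };
    let loud: &dyn Noisy = &tom;
    println!("{}", loud.speak());
}
~~~~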
|
@ -10,11 +10,11 @@
|
||||||
|
|
||||||
trait foo { fn foo(); }
|
trait foo { fn foo(); }
|
||||||
|
|
||||||
fn to_foo<T:Copy + foo>(t: T) -> foo {
|
fn to_foo<T:Copy + foo>(t: T) -> @foo {
|
||||||
@t as @foo //~ ERROR value may contain borrowed pointers; use `&static` bound
|
@t as @foo //~ ERROR value may contain borrowed pointers; use `&static` bound
|
||||||
}
|
}
|
||||||
|
|
||||||
fn to_foo2<T:Copy + foo + &static>(t: T) -> foo {
|
fn to_foo2<T:Copy + foo + &static>(t: T) -> @foo {
|
||||||
@t as @foo
|
@t as @foo
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -15,7 +15,7 @@ use core::hashmap::linear::LinearMap;
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
let x: @Map<~str, ~str> = @LinearMap::new::<~str, ~str>() as
|
let x: @Map<~str, ~str> = @LinearMap::new::<~str, ~str>() as
|
||||||
Map::<~str, ~str>;
|
@Map::<~str, ~str>;
|
||||||
let y: @Map<uint, ~str> = @x;
|
let y: @Map<uint, ~str> = @x;
|
||||||
//~^ ERROR mismatched types: expected `@core::container::Map<uint,~str>`
|
//~^ ERROR mismatched types: expected `@core::container::Map<uint,~str>`
|
||||||
}
|
}
|
||||||
|
|
|
@ -22,7 +22,7 @@ fn a_fn1(e: an_enum<'a>) -> an_enum<'b> {
|
||||||
return e; //~ ERROR mismatched types: expected `an_enum/&b` but found `an_enum/&a`
|
return e; //~ ERROR mismatched types: expected `an_enum/&b` but found `an_enum/&a`
|
||||||
}
|
}
|
||||||
|
|
||||||
fn a_fn2(e: a_trait<'a>) -> a_trait<'b> {
|
fn a_fn2(e: @a_trait<'a>) -> @a_trait<'b> {
|
||||||
return e; //~ ERROR mismatched types: expected `@a_trait/&b` but found `@a_trait/&a`
|
return e; //~ ERROR mismatched types: expected `@a_trait/&b` but found `@a_trait/&a`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -18,7 +18,7 @@ trait foo<'self> {
|
||||||
}
|
}
|
||||||
|
|
||||||
struct with_foo<'self> {
|
struct with_foo<'self> {
|
||||||
f: foo<'self>
|
f: @foo<'self>
|
||||||
}
|
}
|
||||||
|
|
||||||
trait set_foo_foo {
|
trait set_foo_foo {
|
||||||
|
|
|
@ -27,12 +27,12 @@ impl get_ctxt for has_ctxt<'self> {
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_v(gc: get_ctxt) -> uint {
|
fn get_v(gc: @get_ctxt) -> uint {
|
||||||
gc.get_ctxt().v
|
gc.get_ctxt().v
|
||||||
}
|
}
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
let ctxt = ctxt { v: 22u };
|
let ctxt = ctxt { v: 22u };
|
||||||
let hc = has_ctxt { c: &ctxt };
|
let hc = has_ctxt { c: &ctxt };
|
||||||
fail_unless!(get_v(@hc as get_ctxt) == 22u);
|
fail_unless!(get_v(@hc as @get_ctxt) == 22u);
|
||||||
}
|
}
|
||||||
|
|
|
@ -20,10 +20,10 @@ impl<'self> get_ctxt<'self> for has_ctxt<'self> {
|
||||||
fn get_ctxt() -> &self/ctxt { self.c }
|
fn get_ctxt() -> &self/ctxt { self.c }
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make_gc() -> get_ctxt {
|
fn make_gc() -> @get_ctxt {
|
||||||
let ctxt = ctxt { v: 22u };
|
let ctxt = ctxt { v: 22u };
|
||||||
let hc = has_ctxt { c: &ctxt }; //~ ERROR illegal borrow
|
let hc = has_ctxt { c: &ctxt }; //~ ERROR illegal borrow
|
||||||
return @hc as get_ctxt;
|
return @hc as @get_ctxt;
|
||||||
}
|
}
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
|
|
|
@ -12,7 +12,7 @@ trait get_ctxt {
|
||||||
fn get_ctxt() -> &self/uint;
|
fn get_ctxt() -> &self/uint;
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make_gc1(gc: get_ctxt/&a) -> get_ctxt/&b {
|
fn make_gc1(gc: @get_ctxt/&a) -> @get_ctxt/&b {
|
||||||
return gc; //~ ERROR mismatched types: expected `@get_ctxt/&b` but found `@get_ctxt/&a`
|
return gc; //~ ERROR mismatched types: expected `@get_ctxt/&b` but found `@get_ctxt/&a`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -24,8 +24,8 @@ impl get_ctxt/&self for Foo/&self {
|
||||||
fn get_ctxt() -> &self/uint { self.r }
|
fn get_ctxt() -> &self/uint { self.r }
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make_gc2(foo: Foo/&a) -> get_ctxt/&b {
|
fn make_gc2(foo: Foo/&a) -> @get_ctxt/&b {
|
||||||
return @foo as get_ctxt; //~ ERROR cannot infer an appropriate lifetime
|
return @foo as @get_ctxt; //~ ERROR cannot infer an appropriate lifetime
|
||||||
}
|
}
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
|
|
|
@ -24,7 +24,7 @@ impl<T:Copy> box_trait<T> for box_impl<T> {
|
||||||
fn set(t: T) { self.f = t; }
|
fn set(t: T) { self.f = t; }
|
||||||
}
|
}
|
||||||
|
|
||||||
fn set_box_trait<T>(b: box_trait<@const T>, v: @const T) {
|
fn set_box_trait<T>(b: @box_trait<@const T>, v: @const T) {
|
||||||
b.set(v);
|
b.set(v);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -34,7 +34,7 @@ fn set_box_impl<T>(b: box_impl<@const T>, v: @const T) {
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
let b = box_impl::<@int>(box::<@int> {f: @3});
|
let b = box_impl::<@int>(box::<@int> {f: @3});
|
||||||
set_box_trait(@b as box_trait::<@int>, @mut 5);
|
set_box_trait(@b as @box_trait::<@int>, @mut 5);
|
||||||
//~^ ERROR values differ in mutability
|
//~^ ERROR values differ in mutability
|
||||||
set_box_impl(b, @mut 5);
|
set_box_impl(b, @mut 5);
|
||||||
//~^ ERROR values differ in mutability
|
//~^ ERROR values differ in mutability
|
||||||
|
|
|
@ -15,5 +15,5 @@ impl bar for uint { fn dup() -> uint { self } fn blah<X>() {} }
|
||||||
fn main() {
|
fn main() {
|
||||||
10i.dup::<int>(); //~ ERROR does not take type parameters
|
10i.dup::<int>(); //~ ERROR does not take type parameters
|
||||||
10i.blah::<int, int>(); //~ ERROR incorrect number of type parameters
|
10i.blah::<int, int>(); //~ ERROR incorrect number of type parameters
|
||||||
(@10 as bar).dup(); //~ ERROR contains a self-type
|
(@10 as @bar).dup(); //~ ERROR contains a self-type
|
||||||
}
|
}
|
||||||
|
|
|
@ -24,7 +24,7 @@ impl TraitB for int {
|
||||||
|
|
||||||
fn call_it<B:TraitB>(b: B) -> int {
|
fn call_it<B:TraitB>(b: B) -> int {
|
||||||
let y = 4u;
|
let y = 4u;
|
||||||
b.gimme_an_a(y) //~ ERROR failed to find an implementation of trait @TraitA
|
b.gimme_an_a(y) //~ ERROR failed to find an implementation of trait TraitA
|
||||||
}
|
}
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
|
|
|
@ -8,7 +8,7 @@
|
||||||
// option. This file may not be copied, modified, or distributed
|
// option. This file may not be copied, modified, or distributed
|
||||||
// except according to those terms.
|
// except according to those terms.
|
||||||
|
|
||||||
trait clam<A:Copy> {
|
trait c lam<A:Copy> {
|
||||||
fn chowder(y: A);
|
fn chowder(y: A);
|
||||||
}
|
}
|
||||||
struct foo<A> {
|
struct foo<A> {
|
||||||
|
@ -26,13 +26,13 @@ fn foo<A:Copy>(b: A) -> foo<A> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn f<A:Copy>(x: clam<A>, a: A) {
|
fn f<A:Copy>(x: @clam<A>, a: A) {
|
||||||
x.chowder(a);
|
x.chowder(a);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn main() {
|
pub fn main() {
|
||||||
|
|
||||||
let c = foo(42);
|
let c = foo(42);
|
||||||
let d: clam<int> = @c as clam::<int>;
|
let d: @clam<int> = @c as @clam::<int>;
|
||||||
f(d, c.x);
|
f(d, c.x);
|
||||||
}
|
}
|
||||||
|
|
|
@ -11,7 +11,7 @@
|
||||||
trait hax { }
|
trait hax { }
|
||||||
impl<A> hax for A { }
|
impl<A> hax for A { }
|
||||||
|
|
||||||
fn perform_hax<T:&static>(x: @T) -> hax {
|
fn perform_hax<T:&static>(x: @T) -> @hax {
|
||||||
@x as @hax
|
@x as @hax
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -25,7 +25,7 @@ pub fn main() {
|
||||||
// let y = @({a: 4i});
|
// let y = @({a: 4i});
|
||||||
// let z = @({a: 4i} as it);
|
// let z = @({a: 4i} as it);
|
||||||
// let z = @({a: true} as it);
|
// let z = @({a: true} as it);
|
||||||
let z = @(@true as it);
|
let z = @(@true as @it);
|
||||||
// x.f();
|
// x.f();
|
||||||
// y.f();
|
// y.f();
|
||||||
// (*z).f();
|
// (*z).f();
|
||||||
|
|
|
@ -14,9 +14,9 @@ impl<A:Copy> repeat<A> for @A {
|
||||||
fn get() -> A { *self }
|
fn get() -> A { *self }
|
||||||
}
|
}
|
||||||
|
|
||||||
fn repeater<A:Copy>(v: @A) -> repeat<A> {
|
fn repeater<A:Copy>(v: @A) -> @repeat<A> {
|
||||||
// Note: owned kind is not necessary as A appears in the trait type
|
// Note: owned kind is not necessary as A appears in the trait type
|
||||||
@v as repeat::<A> // No
|
@v as @repeat::<A> // No
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn main() {
|
pub fn main() {
|
||||||
|
|
|
@ -29,7 +29,7 @@ impl<A:Copy + Serializable> Serializable for F<A> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Serializer for io::Writer {
|
impl Serializer for @io::Writer {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn main() {
|
pub fn main() {
|
||||||
|
|
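The last hunk is the mirror image: an impl whose self type used to be the bare trait `io::Writer` now targets the object type `@io::Writer` explicitly. The closest present-day shape is an impl on a concrete object type such as `Box<dyn Write>`; the `Serializer` trait below is a stand-in invented for the sketch.

~~~~
use std::io::Write;

// Hypothetical trait standing in for the test's `Serializer`.
trait Serializer {
    fn emit_u8(&mut self, v: u8);
}

// The impl targets a concrete object type (`Box<dyn Write>`), not a bare trait.
impl Serializer for Box<dyn Write> {
    fn emit_u8(&mut self, v: u8) {
        // Ignore I/O errors; this is only a sketch.
        let _ = self.write_all(&[v]);
    }
}

fn main() {
    let mut out: Box<dyn Write> = Box::new(Vec::<u8>::new());
    out.emit_u8(42);
}
~~~~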
Some files were not shown because too many files have changed in this diff.