1
Fork 0

libsyntax: Fix errors arising from the automated ~[T] conversion

This commit is contained in:
Patrick Walton 2014-02-28 12:54:01 -08:00
parent 58fd6ab90d
commit 198cc3d850
54 changed files with 577 additions and 306 deletions

View file

@ -16,6 +16,7 @@ use clone::Clone;
use cmp::{Eq, Ordering, TotalEq, TotalOrd}; use cmp::{Eq, Ordering, TotalEq, TotalOrd};
use container::Container; use container::Container;
use default::Default; use default::Default;
use fmt;
use iter::{DoubleEndedIterator, FromIterator, Iterator}; use iter::{DoubleEndedIterator, FromIterator, Iterator};
use libc::{free, c_void}; use libc::{free, c_void};
use mem::{size_of, move_val_init}; use mem::{size_of, move_val_init};
@ -82,6 +83,26 @@ impl<T: Clone> Vec<T> {
self.push((*element).clone()) self.push((*element).clone())
} }
} }
// Append `n` clones of `*initval` to the vector.
// Capacity for the final length is reserved up front so the pushes in
// the loop do not trigger repeated reallocation.
// NOTE(review): `uint`/`0u` are pre-1.0 Rust (2014-era) spellings of
// today's `usize`/`0usize`; this is historical code from a commit diff.
pub fn grow(&mut self, n: uint, initval: &T) {
let new_len = self.len() + n;
self.reserve(new_len);
let mut i: uint = 0u;
while i < n {
// Requires `T: Clone` (this method lives in `impl<T: Clone> Vec<T>`).
self.push((*initval).clone());
i += 1u;
}
}
// Set the element at `index` to `val`, first growing the vector with
// clones of `*initval` if `index` is past the current end.
// After the conditional `grow`, `index` is guaranteed in-bounds, so the
// `get_mut` dereference below cannot fail on the length check.
pub fn grow_set(&mut self, index: uint, initval: &T, val: T) {
let l = self.len();
if index >= l {
// `index - l + 1u` new slots makes `index` the last valid position.
self.grow(index - l + 1u, initval);
}
*self.get_mut(index) = val;
}
} }
impl<T:Clone> Clone for Vec<T> { impl<T:Clone> Clone for Vec<T> {
@ -388,6 +409,12 @@ impl<T> Default for Vec<T> {
} }
} }
// Formatting for `Vec<T>` (pre-1.0 Rust's `fmt::Show`, the ancestor of
// today's `fmt::Debug`): delegate entirely to the slice implementation
// so a `Vec` prints the same as its borrowed `&[T]` view.
impl<T:fmt::Show> fmt::Show for Vec<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.as_slice().fmt(f)
}
}
pub struct MoveItems<T> { pub struct MoveItems<T> {
priv allocation: *mut c_void, // the block of memory allocated for the vector priv allocation: *mut c_void, // the block of memory allocated for the vector
priv iter: Items<'static, T> priv iter: Items<'static, T>

View file

@ -119,7 +119,7 @@ pub fn lookup(name: &str) -> Option<Abi> {
} }
pub fn all_names() -> Vec<&'static str> { pub fn all_names() -> Vec<&'static str> {
AbiDatas.map(|d| d.name) AbiDatas.iter().map(|d| d.name).collect()
} }
impl Abi { impl Abi {

View file

@ -23,6 +23,7 @@ use std::cell::RefCell;
use collections::HashMap; use collections::HashMap;
use std::option::Option; use std::option::Option;
use std::rc::Rc; use std::rc::Rc;
use std::vec_ng::Vec;
use serialize::{Encodable, Decodable, Encoder, Decoder}; use serialize::{Encodable, Decodable, Encoder, Decoder};
/// A pointer abstraction. FIXME(eddyb) #10676 use Rc<T> in the future. /// A pointer abstraction. FIXME(eddyb) #10676 use Rc<T> in the future.
@ -1193,6 +1194,8 @@ mod test {
use codemap::*; use codemap::*;
use super::*; use super::*;
use std::vec_ng::Vec;
fn is_freeze<T: Freeze>() {} fn is_freeze<T: Freeze>() {}
// Assert that the AST remains Freeze (#10693). // Assert that the AST remains Freeze (#10693).

View file

@ -23,6 +23,7 @@ use std::cell::RefCell;
use std::iter; use std::iter;
use std::vec; use std::vec;
use std::fmt; use std::fmt;
use std::vec_ng::Vec;
#[deriving(Clone, Eq)] #[deriving(Clone, Eq)]
pub enum PathElem { pub enum PathElem {
@ -191,7 +192,11 @@ pub struct Map {
impl Map { impl Map {
fn find_entry(&self, id: NodeId) -> Option<MapEntry> { fn find_entry(&self, id: NodeId) -> Option<MapEntry> {
let map = self.map.borrow(); let map = self.map.borrow();
map.get().get(id as uint).map(|x| *x) if map.get().len() > id as uint {
Some(*map.get().get(id as uint))
} else {
None
}
} }
/// Retrieve the Node corresponding to `id`, failing if it cannot /// Retrieve the Node corresponding to `id`, failing if it cannot

View file

@ -23,6 +23,7 @@ use std::cmp;
use collections::HashMap; use collections::HashMap;
use std::u32; use std::u32;
use std::local_data; use std::local_data;
use std::vec_ng::Vec;
pub fn path_name_i(idents: &[Ident]) -> ~str { pub fn path_name_i(idents: &[Ident]) -> ~str {
// FIXME: Bad copies (#2543 -- same for everything else that says "bad") // FIXME: Bad copies (#2543 -- same for everything else that says "bad")
@ -795,7 +796,7 @@ pub fn resolve_internal(id : Ident,
let resolved = { let resolved = {
let result = { let result = {
let table = table.table.borrow(); let table = table.table.borrow();
table.get()[id.ctxt] *table.get().get(id.ctxt as uint)
}; };
match result { match result {
EmptyCtxt => id.name, EmptyCtxt => id.name,
@ -844,7 +845,7 @@ pub fn marksof(ctxt: SyntaxContext, stopname: Name, table: &SCTable) -> Vec<Mrk>
loop { loop {
let table_entry = { let table_entry = {
let table = table.table.borrow(); let table = table.table.borrow();
table.get()[loopvar] *table.get().get(loopvar as uint)
}; };
match table_entry { match table_entry {
EmptyCtxt => { EmptyCtxt => {
@ -873,7 +874,7 @@ pub fn marksof(ctxt: SyntaxContext, stopname: Name, table: &SCTable) -> Vec<Mrk>
pub fn mtwt_outer_mark(ctxt: SyntaxContext) -> Mrk { pub fn mtwt_outer_mark(ctxt: SyntaxContext) -> Mrk {
let sctable = get_sctable(); let sctable = get_sctable();
let table = sctable.table.borrow(); let table = sctable.table.borrow();
match table.get()[ctxt] { match *table.get().get(ctxt as uint) {
ast::Mark(mrk,_) => mrk, ast::Mark(mrk,_) => mrk,
_ => fail!("can't retrieve outer mark when outside is not a mark") _ => fail!("can't retrieve outer mark when outside is not a mark")
} }
@ -901,7 +902,7 @@ pub fn getLast(arr: &Vec<Mrk> ) -> Mrk {
pub fn path_name_eq(a : &ast::Path, b : &ast::Path) -> bool { pub fn path_name_eq(a : &ast::Path, b : &ast::Path) -> bool {
(a.span == b.span) (a.span == b.span)
&& (a.global == b.global) && (a.global == b.global)
&& (segments_name_eq(a.segments, b.segments)) && (segments_name_eq(a.segments.as_slice(), b.segments.as_slice()))
} }
// are two arrays of segments equal when compared unhygienically? // are two arrays of segments equal when compared unhygienically?
@ -938,6 +939,8 @@ mod test {
use opt_vec; use opt_vec;
use collections::HashMap; use collections::HashMap;
use std::vec_ng::Vec;
fn ident_to_segment(id : &Ident) -> PathSegment { fn ident_to_segment(id : &Ident) -> PathSegment {
PathSegment {identifier:id.clone(), PathSegment {identifier:id.clone(),
lifetimes: opt_vec::Empty, lifetimes: opt_vec::Empty,
@ -1000,7 +1003,7 @@ mod test {
let mut result = Vec::new(); let mut result = Vec::new();
loop { loop {
let table = table.table.borrow(); let table = table.table.borrow();
match table.get()[sc] { match *table.get().get(sc as uint) {
EmptyCtxt => {return result;}, EmptyCtxt => {return result;},
Mark(mrk,tail) => { Mark(mrk,tail) => {
result.push(M(mrk)); result.push(M(mrk));
@ -1024,9 +1027,9 @@ mod test {
assert_eq!(unfold_test_sc(test_sc.clone(),EMPTY_CTXT,&mut t),4); assert_eq!(unfold_test_sc(test_sc.clone(),EMPTY_CTXT,&mut t),4);
{ {
let table = t.table.borrow(); let table = t.table.borrow();
assert!(table.get()[2] == Mark(9,0)); assert!(*table.get().get(2) == Mark(9,0));
assert!(table.get()[3] == Rename(id(101,0),14,2)); assert!(*table.get().get(3) == Rename(id(101,0),14,2));
assert!(table.get()[4] == Mark(3,3)); assert!(*table.get().get(4) == Mark(3,3));
} }
assert_eq!(refold_test_sc(4,&t),test_sc); assert_eq!(refold_test_sc(4,&t),test_sc);
} }
@ -1045,8 +1048,8 @@ mod test {
assert_eq!(unfold_marks(vec!(3,7),EMPTY_CTXT,&mut t),3); assert_eq!(unfold_marks(vec!(3,7),EMPTY_CTXT,&mut t),3);
{ {
let table = t.table.borrow(); let table = t.table.borrow();
assert!(table.get()[2] == Mark(7,0)); assert!(*table.get().get(2) == Mark(7,0));
assert!(table.get()[3] == Mark(3,2)); assert!(*table.get().get(3) == Mark(3,2));
} }
} }

View file

@ -21,6 +21,7 @@ use parse::token;
use crateid::CrateId; use crateid::CrateId;
use collections::HashSet; use collections::HashSet;
use std::vec_ng::Vec;
pub trait AttrMetaMethods { pub trait AttrMetaMethods {
// This could be changed to `fn check_name(&self, name: InternedString) -> // This could be changed to `fn check_name(&self, name: InternedString) ->
@ -226,7 +227,8 @@ pub fn sort_meta_items(items: &[@MetaItem]) -> Vec<@MetaItem> {
match m.node { match m.node {
MetaList(ref n, ref mis) => { MetaList(ref n, ref mis) => {
@Spanned { @Spanned {
node: MetaList((*n).clone(), sort_meta_items(*mis)), node: MetaList((*n).clone(),
sort_meta_items(mis.as_slice())),
.. /*bad*/ (*m).clone() .. /*bad*/ (*m).clone()
} }
} }
@ -243,7 +245,7 @@ pub fn find_linkage_metas(attrs: &[Attribute]) -> Vec<@MetaItem> {
let mut result = Vec::new(); let mut result = Vec::new();
for attr in attrs.iter().filter(|at| at.name().equiv(&("link"))) { for attr in attrs.iter().filter(|at| at.name().equiv(&("link"))) {
match attr.meta().node { match attr.meta().node {
MetaList(_, ref items) => result.push_all(*items), MetaList(_, ref items) => result.push_all(items.as_slice()),
_ => () _ => ()
} }
} }
@ -272,9 +274,9 @@ pub fn find_inline_attr(attrs: &[Attribute]) -> InlineAttr {
match attr.node.value.node { match attr.node.value.node {
MetaWord(ref n) if n.equiv(&("inline")) => InlineHint, MetaWord(ref n) if n.equiv(&("inline")) => InlineHint,
MetaList(ref n, ref items) if n.equiv(&("inline")) => { MetaList(ref n, ref items) if n.equiv(&("inline")) => {
if contains_name(*items, "always") { if contains_name(items.as_slice(), "always") {
InlineAlways InlineAlways
} else if contains_name(*items, "never") { } else if contains_name(items.as_slice(), "never") {
InlineNever InlineNever
} else { } else {
InlineHint InlineHint

View file

@ -23,6 +23,7 @@ source code snippets, etc.
use std::cell::RefCell; use std::cell::RefCell;
use std::cmp; use std::cmp;
use std::vec_ng::Vec;
use serialize::{Encodable, Decodable, Encoder, Decoder}; use serialize::{Encodable, Decodable, Encoder, Decoder};
pub trait Pos { pub trait Pos {
@ -224,14 +225,14 @@ impl FileMap {
// the new charpos must be > the last one (or it's the first one). // the new charpos must be > the last one (or it's the first one).
let mut lines = self.lines.borrow_mut();; let mut lines = self.lines.borrow_mut();;
let line_len = lines.get().len(); let line_len = lines.get().len();
assert!(line_len == 0 || (lines.get()[line_len - 1] < pos)) assert!(line_len == 0 || (*lines.get().get(line_len - 1) < pos))
lines.get().push(pos); lines.get().push(pos);
} }
// get a line from the list of pre-computed line-beginnings // get a line from the list of pre-computed line-beginnings
pub fn get_line(&self, line: int) -> ~str { pub fn get_line(&self, line: int) -> ~str {
let mut lines = self.lines.borrow_mut(); let mut lines = self.lines.borrow_mut();
let begin: BytePos = lines.get()[line] - self.start_pos; let begin: BytePos = *lines.get().get(line as uint) - self.start_pos;
let begin = begin.to_uint(); let begin = begin.to_uint();
let slice = self.src.slice_from(begin); let slice = self.src.slice_from(begin);
match slice.find('\n') { match slice.find('\n') {
@ -373,7 +374,7 @@ impl CodeMap {
let mut b = len; let mut b = len;
while b - a > 1u { while b - a > 1u {
let m = (a + b) / 2u; let m = (a + b) / 2u;
if files[m].start_pos > pos { if files.get(m).start_pos > pos {
b = m; b = m;
} else { } else {
a = m; a = m;
@ -383,7 +384,7 @@ impl CodeMap {
// filemap, but are not the filemaps we want (because they are length 0, they cannot // filemap, but are not the filemaps we want (because they are length 0, they cannot
// contain what we are looking for). So, rewind until we find a useful filemap. // contain what we are looking for). So, rewind until we find a useful filemap.
loop { loop {
let lines = files[a].lines.borrow(); let lines = files.get(a).lines.borrow();
let lines = lines.get(); let lines = lines.get();
if lines.len() > 0 { if lines.len() > 0 {
break; break;
@ -405,13 +406,13 @@ impl CodeMap {
let idx = self.lookup_filemap_idx(pos); let idx = self.lookup_filemap_idx(pos);
let files = self.files.borrow(); let files = self.files.borrow();
let f = files.get()[idx]; let f = *files.get().get(idx);
let mut a = 0u; let mut a = 0u;
let mut lines = f.lines.borrow_mut(); let mut lines = f.lines.borrow_mut();
let mut b = lines.get().len(); let mut b = lines.get().len();
while b - a > 1u { while b - a > 1u {
let m = (a + b) / 2u; let m = (a + b) / 2u;
if lines.get()[m] > pos { b = m; } else { a = m; } if *lines.get().get(m) > pos { b = m; } else { a = m; }
} }
return FileMapAndLine {fm: f, line: a}; return FileMapAndLine {fm: f, line: a};
} }
@ -421,7 +422,7 @@ impl CodeMap {
let line = a + 1u; // Line numbers start at 1 let line = a + 1u; // Line numbers start at 1
let chpos = self.bytepos_to_file_charpos(pos); let chpos = self.bytepos_to_file_charpos(pos);
let lines = f.lines.borrow(); let lines = f.lines.borrow();
let linebpos = lines.get()[a]; let linebpos = *lines.get().get(a);
let linechpos = self.bytepos_to_file_charpos(linebpos); let linechpos = self.bytepos_to_file_charpos(linebpos);
debug!("codemap: byte pos {:?} is on the line at byte pos {:?}", debug!("codemap: byte pos {:?} is on the line at byte pos {:?}",
pos, linebpos); pos, linebpos);
@ -440,7 +441,7 @@ impl CodeMap {
-> FileMapAndBytePos { -> FileMapAndBytePos {
let idx = self.lookup_filemap_idx(bpos); let idx = self.lookup_filemap_idx(bpos);
let files = self.files.borrow(); let files = self.files.borrow();
let fm = files.get()[idx]; let fm = *files.get().get(idx);
let offset = bpos - fm.start_pos; let offset = bpos - fm.start_pos;
return FileMapAndBytePos {fm: fm, pos: offset}; return FileMapAndBytePos {fm: fm, pos: offset};
} }
@ -450,7 +451,7 @@ impl CodeMap {
debug!("codemap: converting {:?} to char pos", bpos); debug!("codemap: converting {:?} to char pos", bpos);
let idx = self.lookup_filemap_idx(bpos); let idx = self.lookup_filemap_idx(bpos);
let files = self.files.borrow(); let files = self.files.borrow();
let map = files.get()[idx]; let map = files.get().get(idx);
// The number of extra bytes due to multibyte chars in the FileMap // The number of extra bytes due to multibyte chars in the FileMap
let mut total_extra_bytes = 0; let mut total_extra_bytes = 0;

View file

@ -19,6 +19,7 @@ use std::fmt;
/// to be `0.0`. /// to be `0.0`.
use std::from_str::FromStr; use std::from_str::FromStr;
use std::vec_ng::Vec;
#[deriving(Clone, Eq)] #[deriving(Clone, Eq)]
pub struct CrateId { pub struct CrateId {
@ -49,7 +50,7 @@ impl fmt::Show for CrateId {
impl FromStr for CrateId { impl FromStr for CrateId {
fn from_str(s: &str) -> Option<CrateId> { fn from_str(s: &str) -> Option<CrateId> {
let pieces: Vec<&str> = s.splitn('#', 1).collect(); let pieces: Vec<&str> = s.splitn('#', 1).collect();
let path = pieces[0].to_owned(); let path = pieces.get(0).to_owned();
if path.starts_with("/") || path.ends_with("/") || if path.starts_with("/") || path.ends_with("/") ||
path.starts_with(".") || path.is_empty() { path.starts_with(".") || path.is_empty() {
@ -57,16 +58,18 @@ impl FromStr for CrateId {
} }
let path_pieces: Vec<&str> = path.rsplitn('/', 1).collect(); let path_pieces: Vec<&str> = path.rsplitn('/', 1).collect();
let inferred_name = path_pieces[0]; let inferred_name = *path_pieces.get(0);
let (name, version) = if pieces.len() == 1 { let (name, version) = if pieces.len() == 1 {
(inferred_name.to_owned(), None) (inferred_name.to_owned(), None)
} else { } else {
let hash_pieces: Vec<&str> = pieces[1].splitn(':', 1).collect(); let hash_pieces: Vec<&str> = pieces.get(1)
.splitn(':', 1)
.collect();
let (hash_name, hash_version) = if hash_pieces.len() == 1 { let (hash_name, hash_version) = if hash_pieces.len() == 1 {
("", hash_pieces[0]) ("", *hash_pieces.get(0))
} else { } else {
(hash_pieces[0], hash_pieces[1]) (*hash_pieces.get(0), *hash_pieces.get(1))
}; };
let name = if !hash_name.is_empty() { let name = if !hash_name.is_empty() {
@ -89,7 +92,7 @@ impl FromStr for CrateId {
}; };
Some(CrateId { Some(CrateId {
path: path, path: path.clone(),
name: name, name: name,
version: version, version: version,
}) })

View file

@ -325,7 +325,7 @@ fn highlight_lines(err: &mut EmitterWriter,
if lines.lines.len() == 1u { if lines.lines.len() == 1u {
let lo = cm.lookup_char_pos(sp.lo); let lo = cm.lookup_char_pos(sp.lo);
let mut digits = 0u; let mut digits = 0u;
let mut num = (lines.lines[0] + 1u) / 10u; let mut num = (*lines.lines.get(0) + 1u) / 10u;
// how many digits must be indent past? // how many digits must be indent past?
while num > 0u { num /= 10u; digits += 1u; } while num > 0u { num /= 10u; digits += 1u; }
@ -337,7 +337,7 @@ fn highlight_lines(err: &mut EmitterWriter,
// part of the 'filename:line ' part of the previous line. // part of the 'filename:line ' part of the previous line.
let skip = fm.name.len() + digits + 3u; let skip = fm.name.len() + digits + 3u;
for _ in range(0, skip) { s.push_char(' '); } for _ in range(0, skip) { s.push_char(' '); }
let orig = fm.get_line(lines.lines[0] as int); let orig = fm.get_line(*lines.lines.get(0) as int);
for pos in range(0u, left-skip) { for pos in range(0u, left-skip) {
let curChar = orig[pos] as char; let curChar = orig[pos] as char;
// Whenever a tab occurs on the previous line, we insert one on // Whenever a tab occurs on the previous line, we insert one on

View file

@ -20,6 +20,8 @@ use parse;
use parse::token::InternedString; use parse::token::InternedString;
use parse::token; use parse::token;
use std::vec_ng::Vec;
enum State { enum State {
Asm, Asm,
Outputs, Outputs,
@ -42,7 +44,9 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> base::MacResult { -> base::MacResult {
let mut p = parse::new_parser_from_tts(cx.parse_sess(), let mut p = parse::new_parser_from_tts(cx.parse_sess(),
cx.cfg(), cx.cfg(),
tts.to_owned()); tts.iter()
.map(|x| (*x).clone())
.collect());
let mut asm = InternedString::new(""); let mut asm = InternedString::new("");
let mut asm_str_style = None; let mut asm_str_style = None;

View file

@ -20,6 +20,7 @@ use parse::token::{InternedString, intern, str_to_ident};
use util::small_vector::SmallVector; use util::small_vector::SmallVector;
use collections::HashMap; use collections::HashMap;
use std::vec_ng::Vec;
// new-style macro! tt code: // new-style macro! tt code:
// //
@ -461,7 +462,9 @@ pub fn get_exprs_from_tts(cx: &ExtCtxt,
tts: &[ast::TokenTree]) -> Option<Vec<@ast::Expr> > { tts: &[ast::TokenTree]) -> Option<Vec<@ast::Expr> > {
let mut p = parse::new_parser_from_tts(cx.parse_sess(), let mut p = parse::new_parser_from_tts(cx.parse_sess(),
cx.cfg(), cx.cfg(),
tts.to_owned()); tts.iter()
.map(|x| (*x).clone())
.collect());
let mut es = Vec::new(); let mut es = Vec::new();
while p.token != token::EOF { while p.token != token::EOF {
if es.len() != 0 && !p.eat(&token::COMMA) { if es.len() != 0 && !p.eat(&token::COMMA) {
@ -553,6 +556,7 @@ impl SyntaxEnv {
} }
pub fn info<'a>(&'a mut self) -> &'a mut BlockInfo { pub fn info<'a>(&'a mut self) -> &'a mut BlockInfo {
&mut self.chain[self.chain.len()-1].info let last_chain_index = self.chain.len() - 1;
&mut self.chain.get_mut(last_chain_index).info
} }
} }

View file

@ -21,6 +21,8 @@ use opt_vec::OptVec;
use parse::token::special_idents; use parse::token::special_idents;
use parse::token; use parse::token;
use std::vec_ng::Vec;
pub struct Field { pub struct Field {
ident: ast::Ident, ident: ast::Ident,
ex: @ast::Expr ex: @ast::Expr
@ -132,7 +134,7 @@ pub trait AstBuilder {
fn expr_vstore(&self, sp: Span, expr: @ast::Expr, vst: ast::ExprVstore) -> @ast::Expr; fn expr_vstore(&self, sp: Span, expr: @ast::Expr, vst: ast::ExprVstore) -> @ast::Expr;
fn expr_vec(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr; fn expr_vec(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr;
fn expr_vec_uniq(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr; fn expr_vec_ng(&self, sp: Span) -> @ast::Expr;
fn expr_vec_slice(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr; fn expr_vec_slice(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr;
fn expr_str(&self, sp: Span, s: InternedString) -> @ast::Expr; fn expr_str(&self, sp: Span, s: InternedString) -> @ast::Expr;
fn expr_str_uniq(&self, sp: Span, s: InternedString) -> @ast::Expr; fn expr_str_uniq(&self, sp: Span, s: InternedString) -> @ast::Expr;
@ -580,8 +582,13 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
fn expr_vec(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr { fn expr_vec(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr {
self.expr(sp, ast::ExprVec(exprs, ast::MutImmutable)) self.expr(sp, ast::ExprVec(exprs, ast::MutImmutable))
} }
fn expr_vec_uniq(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr { fn expr_vec_ng(&self, sp: Span) -> @ast::Expr {
self.expr_vstore(sp, self.expr_vec(sp, exprs), ast::ExprVstoreUniq) self.expr_call_global(sp,
vec!(self.ident_of("std"),
self.ident_of("vec_ng"),
self.ident_of("Vec"),
self.ident_of("new")),
Vec::new())
} }
fn expr_vec_slice(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr { fn expr_vec_slice(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr {
self.expr_vstore(sp, self.expr_vec(sp, exprs), ast::ExprVstoreSlice) self.expr_vstore(sp, self.expr_vec(sp, exprs), ast::ExprVstoreSlice)
@ -701,14 +708,12 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
self.expr(span, ast::ExprFnBlock(fn_decl, blk)) self.expr(span, ast::ExprFnBlock(fn_decl, blk))
} }
fn lambda0(&self, _span: Span, blk: P<ast::Block>) -> @ast::Expr { fn lambda0(&self, span: Span, blk: P<ast::Block>) -> @ast::Expr {
let blk_e = self.expr(blk.span, ast::ExprBlock(blk)); self.lambda(span, Vec::new(), blk)
quote_expr!(self, || $blk_e )
} }
fn lambda1(&self, _span: Span, blk: P<ast::Block>, ident: ast::Ident) -> @ast::Expr { fn lambda1(&self, span: Span, blk: P<ast::Block>, ident: ast::Ident) -> @ast::Expr {
let blk_e = self.expr(blk.span, ast::ExprBlock(blk)); self.lambda(span, vec!(ident), blk)
quote_expr!(self, |$ident| $blk_e )
} }
fn lambda_expr(&self, span: Span, ids: Vec<ast::Ident> , expr: @ast::Expr) -> @ast::Expr { fn lambda_expr(&self, span: Span, ids: Vec<ast::Ident> , expr: @ast::Expr) -> @ast::Expr {
@ -721,7 +726,11 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
self.lambda1(span, self.block_expr(expr), ident) self.lambda1(span, self.block_expr(expr), ident)
} }
fn lambda_stmts(&self, span: Span, ids: Vec<ast::Ident> , stmts: Vec<@ast::Stmt> ) -> @ast::Expr { fn lambda_stmts(&self,
span: Span,
ids: Vec<ast::Ident>,
stmts: Vec<@ast::Stmt>)
-> @ast::Expr {
self.lambda(span, ids, self.block(span, stmts, None)) self.lambda(span, ids, self.block(span, stmts, None))
} }
fn lambda_stmts_0(&self, span: Span, stmts: Vec<@ast::Stmt> ) -> @ast::Expr { fn lambda_stmts_0(&self, span: Span, stmts: Vec<@ast::Stmt> ) -> @ast::Expr {
@ -921,7 +930,9 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
self.view_use(sp, vis, self.view_use(sp, vis,
vec!(@respan(sp, vec!(@respan(sp,
ast::ViewPathList(self.path(sp, path), ast::ViewPathList(self.path(sp, path),
imports, imports.iter()
.map(|x| *x)
.collect(),
ast::DUMMY_NODE_ID)))) ast::DUMMY_NODE_ID))))
} }

View file

@ -17,6 +17,7 @@ use ext::base;
use ext::build::AstBuilder; use ext::build::AstBuilder;
use std::char; use std::char;
use std::vec_ng::Vec;
pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult { pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult {
// Gather all argument expressions // Gather all argument expressions

View file

@ -26,10 +26,14 @@ use parse::token::InternedString;
use parse::token; use parse::token;
use parse; use parse;
use std::vec_ng::Vec;
pub fn expand_cfg(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult { pub fn expand_cfg(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult {
let mut p = parse::new_parser_from_tts(cx.parse_sess(), let mut p = parse::new_parser_from_tts(cx.parse_sess(),
cx.cfg(), cx.cfg(),
tts.to_owned()); tts.iter()
.map(|x| (*x).clone())
.collect());
let mut cfgs = Vec::new(); let mut cfgs = Vec::new();
// parse `cfg!(meta_item, meta_item(x,y), meta_item="foo", ...)` // parse `cfg!(meta_item, meta_item(x,y), meta_item="foo", ...)`
@ -42,7 +46,8 @@ pub fn expand_cfg(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::M
// test_cfg searches for meta items looking like `cfg(foo, ...)` // test_cfg searches for meta items looking like `cfg(foo, ...)`
let in_cfg = &[cx.meta_list(sp, InternedString::new("cfg"), cfgs)]; let in_cfg = &[cx.meta_list(sp, InternedString::new("cfg"), cfgs)];
let matches_cfg = attr::test_cfg(cx.cfg(), in_cfg.iter().map(|&x| x)); let matches_cfg = attr::test_cfg(cx.cfg().as_slice(),
in_cfg.iter().map(|&x| x));
let e = cx.expr_bool(sp, matches_cfg); let e = cx.expr_bool(sp, matches_cfg);
MRExpr(e) MRExpr(e)
} }

View file

@ -14,6 +14,8 @@ use ext::base::ExtCtxt;
use ext::build::AstBuilder; use ext::build::AstBuilder;
use ext::deriving::generic::*; use ext::deriving::generic::*;
use std::vec_ng::Vec;
pub fn expand_deriving_clone(cx: &mut ExtCtxt, pub fn expand_deriving_clone(cx: &mut ExtCtxt,
span: Span, span: Span,
mitem: @MetaItem, mitem: @MetaItem,
@ -99,7 +101,7 @@ fn cs_clone(
name)) name))
} }
if all_fields.len() >= 1 && all_fields[0].name.is_none() { if all_fields.len() >= 1 && all_fields.get(0).name.is_none() {
// enum-like // enum-like
let subcalls = all_fields.map(subcall); let subcalls = all_fields.map(subcall);
cx.expr_call_ident(trait_span, ctor_ident, subcalls) cx.expr_call_ident(trait_span, ctor_ident, subcalls)

View file

@ -14,6 +14,8 @@ use ext::base::ExtCtxt;
use ext::build::AstBuilder; use ext::build::AstBuilder;
use ext::deriving::generic::*; use ext::deriving::generic::*;
use std::vec_ng::Vec;
pub fn expand_deriving_eq(cx: &mut ExtCtxt, pub fn expand_deriving_eq(cx: &mut ExtCtxt,
span: Span, span: Span,
mitem: @MetaItem, mitem: @MetaItem,

View file

@ -15,6 +15,8 @@ use ext::base::ExtCtxt;
use ext::build::AstBuilder; use ext::build::AstBuilder;
use ext::deriving::generic::*; use ext::deriving::generic::*;
use std::vec_ng::Vec;
pub fn expand_deriving_ord(cx: &mut ExtCtxt, pub fn expand_deriving_ord(cx: &mut ExtCtxt,
span: Span, span: Span,
mitem: @MetaItem, mitem: @MetaItem,

View file

@ -14,6 +14,8 @@ use ext::base::ExtCtxt;
use ext::build::AstBuilder; use ext::build::AstBuilder;
use ext::deriving::generic::*; use ext::deriving::generic::*;
use std::vec_ng::Vec;
pub fn expand_deriving_totaleq(cx: &mut ExtCtxt, pub fn expand_deriving_totaleq(cx: &mut ExtCtxt,
span: Span, span: Span,
mitem: @MetaItem, mitem: @MetaItem,

View file

@ -14,7 +14,9 @@ use codemap::Span;
use ext::base::ExtCtxt; use ext::base::ExtCtxt;
use ext::build::AstBuilder; use ext::build::AstBuilder;
use ext::deriving::generic::*; use ext::deriving::generic::*;
use std::cmp::{Ordering, Equal, Less, Greater}; use std::cmp::{Ordering, Equal, Less, Greater};
use std::vec_ng::Vec;
pub fn expand_deriving_totalord(cx: &mut ExtCtxt, pub fn expand_deriving_totalord(cx: &mut ExtCtxt,
span: Span, span: Span,

View file

@ -21,6 +21,8 @@ use ext::deriving::generic::*;
use parse::token::InternedString; use parse::token::InternedString;
use parse::token; use parse::token;
use std::vec_ng::Vec;
pub fn expand_deriving_decodable(cx: &mut ExtCtxt, pub fn expand_deriving_decodable(cx: &mut ExtCtxt,
span: Span, span: Span,
mitem: @MetaItem, mitem: @MetaItem,

View file

@ -14,6 +14,8 @@ use ext::base::ExtCtxt;
use ext::build::AstBuilder; use ext::build::AstBuilder;
use ext::deriving::generic::*; use ext::deriving::generic::*;
use std::vec_ng::Vec;
pub fn expand_deriving_default(cx: &mut ExtCtxt, pub fn expand_deriving_default(cx: &mut ExtCtxt,
span: Span, span: Span,
mitem: @MetaItem, mitem: @MetaItem,

View file

@ -89,6 +89,8 @@ use ext::build::AstBuilder;
use ext::deriving::generic::*; use ext::deriving::generic::*;
use parse::token; use parse::token;
use std::vec_ng::Vec;
pub fn expand_deriving_encodable(cx: &mut ExtCtxt, pub fn expand_deriving_encodable(cx: &mut ExtCtxt,
span: Span, span: Span,
mitem: @MetaItem, mitem: @MetaItem,

View file

@ -188,7 +188,8 @@ use opt_vec;
use parse::token::InternedString; use parse::token::InternedString;
use parse::token; use parse::token;
use std::vec; use std::vec_ng::Vec;
use std::vec_ng;
pub use self::ty::*; pub use self::ty::*;
mod ty; mod ty;
@ -410,7 +411,7 @@ impl<'a> TraitDef<'a> {
cx.item( cx.item(
self.span, self.span,
ident, ident,
vec_ng::append(vec!(doc_attr), self.attributes), vec_ng::append(vec!(doc_attr), self.attributes.as_slice()),
ast::ItemImpl(trait_generics, opt_trait_ref, ast::ItemImpl(trait_generics, opt_trait_ref,
self_type, methods.map(|x| *x))) self_type, methods.map(|x| *x)))
} }
@ -431,13 +432,15 @@ impl<'a> TraitDef<'a> {
self, self,
struct_def, struct_def,
type_ident, type_ident,
self_args, nonself_args) self_args.as_slice(),
nonself_args.as_slice())
} else { } else {
method_def.expand_struct_method_body(cx, method_def.expand_struct_method_body(cx,
self, self,
struct_def, struct_def,
type_ident, type_ident,
self_args, nonself_args) self_args.as_slice(),
nonself_args.as_slice())
}; };
method_def.create_method(cx, self, method_def.create_method(cx, self,
@ -465,13 +468,15 @@ impl<'a> TraitDef<'a> {
self, self,
enum_def, enum_def,
type_ident, type_ident,
self_args, nonself_args) self_args.as_slice(),
nonself_args.as_slice())
} else { } else {
method_def.expand_enum_method_body(cx, method_def.expand_enum_method_body(cx,
self, self,
enum_def, enum_def,
type_ident, type_ident,
self_args, nonself_args) self_args.as_slice(),
nonself_args.as_slice())
}; };
method_def.create_method(cx, self, method_def.create_method(cx, self,
@ -666,14 +671,15 @@ impl<'a> MethodDef<'a> {
// transpose raw_fields // transpose raw_fields
let fields = if raw_fields.len() > 0 { let fields = if raw_fields.len() > 0 {
raw_fields[0].iter() raw_fields.get(0)
.enumerate() .iter()
.map(|(i, &(span, opt_id, field))| { .enumerate()
let other_fields = raw_fields.tail().map(|l| { .map(|(i, &(span, opt_id, field))| {
match &l[i] { let other_fields = raw_fields.tail().iter().map(|l| {
match l.get(i) {
&(_, _, ex) => ex &(_, _, ex) => ex
} }
}); }).collect();
FieldInfo { FieldInfo {
span: span, span: span,
name: opt_id, name: opt_id,
@ -820,17 +826,17 @@ impl<'a> MethodDef<'a> {
Some(variant_index) => { Some(variant_index) => {
// `ref` inside let matches is buggy. Causes havoc wih rusc. // `ref` inside let matches is buggy. Causes havoc wih rusc.
// let (variant_index, ref self_vec) = matches_so_far[0]; // let (variant_index, ref self_vec) = matches_so_far[0];
let (variant, self_vec) = match matches_so_far[0] { let (variant, self_vec) = match matches_so_far.get(0) {
(_, v, ref s) => (v, s) &(_, v, ref s) => (v, s)
}; };
let mut enum_matching_fields = vec::from_elem(self_vec.len(), Vec::new()); let mut enum_matching_fields = Vec::from_elem(self_vec.len(), Vec::new());
for triple in matches_so_far.tail().iter() { for triple in matches_so_far.tail().iter() {
match triple { match triple {
&(_, _, ref other_fields) => { &(_, _, ref other_fields) => {
for (i, &(_, _, e)) in other_fields.iter().enumerate() { for (i, &(_, _, e)) in other_fields.iter().enumerate() {
enum_matching_fields[i].push(e); enum_matching_fields.get_mut(i).push(e);
} }
} }
} }
@ -849,7 +855,7 @@ impl<'a> MethodDef<'a> {
substructure = EnumMatching(variant_index, variant, field_tuples); substructure = EnumMatching(variant_index, variant, field_tuples);
} }
None => { None => {
substructure = EnumNonMatching(*matches_so_far); substructure = EnumNonMatching(matches_so_far.as_slice());
} }
} }
self.call_substructure_method(cx, trait_, type_ident, self.call_substructure_method(cx, trait_, type_ident,
@ -877,7 +883,7 @@ impl<'a> MethodDef<'a> {
}; };
// matching-variant match // matching-variant match
let variant = enum_def.variants[index]; let variant = *enum_def.variants.get(index);
let (pattern, idents) = trait_.create_enum_variant_pattern(cx, let (pattern, idents) = trait_.create_enum_variant_pattern(cx,
variant, variant,
current_match_str, current_match_str,
@ -1149,11 +1155,19 @@ pub fn cs_fold(use_foldl: bool,
EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => { EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => {
if use_foldl { if use_foldl {
all_fields.iter().fold(base, |old, field| { all_fields.iter().fold(base, |old, field| {
f(cx, field.span, old, field.self_, field.other) f(cx,
field.span,
old,
field.self_,
field.other.as_slice())
}) })
} else { } else {
all_fields.rev_iter().fold(base, |old, field| { all_fields.rev_iter().fold(base, |old, field| {
f(cx, field.span, old, field.self_, field.other) f(cx,
field.span,
old,
field.self_,
field.other.as_slice())
}) })
} }
}, },

View file

@ -14,6 +14,8 @@ use ext::base::ExtCtxt;
use ext::build::AstBuilder; use ext::build::AstBuilder;
use ext::deriving::generic::*; use ext::deriving::generic::*;
use std::vec_ng::Vec;
pub fn expand_deriving_hash(cx: &mut ExtCtxt, pub fn expand_deriving_hash(cx: &mut ExtCtxt,
span: Span, span: Span,
mitem: @MetaItem, mitem: @MetaItem,

View file

@ -16,6 +16,8 @@ use ext::build::AstBuilder;
use ext::deriving::generic::*; use ext::deriving::generic::*;
use parse::token::InternedString; use parse::token::InternedString;
use std::vec_ng::Vec;
pub fn expand_deriving_from_primitive(cx: &mut ExtCtxt, pub fn expand_deriving_from_primitive(cx: &mut ExtCtxt,
span: Span, span: Span,
mitem: @MetaItem, mitem: @MetaItem,

View file

@ -16,6 +16,8 @@ use ext::build::{AstBuilder};
use ext::deriving::generic::*; use ext::deriving::generic::*;
use opt_vec; use opt_vec;
use std::vec_ng::Vec;
pub fn expand_deriving_rand(cx: &mut ExtCtxt, pub fn expand_deriving_rand(cx: &mut ExtCtxt,
span: Span, span: Span,
mitem: @MetaItem, mitem: @MetaItem,
@ -64,7 +66,7 @@ fn rand_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure)
let rand_call = |cx: &mut ExtCtxt, span| { let rand_call = |cx: &mut ExtCtxt, span| {
cx.expr_call_global(span, cx.expr_call_global(span,
rand_ident.clone(), rand_ident.clone(),
vec!( rng[0] )) vec!( *rng.get(0) ))
}; };
return match *substr.fields { return match *substr.fields {
@ -90,7 +92,7 @@ fn rand_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure)
// ::std::rand::Rand::rand(rng) // ::std::rand::Rand::rand(rng)
let rv_call = cx.expr_call(trait_span, let rv_call = cx.expr_call(trait_span,
rand_name, rand_name,
vec!( rng[0] )); vec!( *rng.get(0) ));
// need to specify the uint-ness of the random number // need to specify the uint-ness of the random number
let uint_ty = cx.ty_ident(trait_span, cx.ident_of("uint")); let uint_ty = cx.ty_ident(trait_span, cx.ident_of("uint"));

View file

@ -19,6 +19,7 @@ use ext::deriving::generic::*;
use parse::token; use parse::token;
use collections::HashMap; use collections::HashMap;
use std::vec_ng::Vec;
pub fn expand_deriving_show(cx: &mut ExtCtxt, pub fn expand_deriving_show(cx: &mut ExtCtxt,
span: Span, span: Span,
@ -79,7 +80,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span,
EnumMatching(_, _, ref fields) if fields.len() == 0 => {} EnumMatching(_, _, ref fields) if fields.len() == 0 => {}
Struct(ref fields) | EnumMatching(_, _, ref fields) => { Struct(ref fields) | EnumMatching(_, _, ref fields) => {
if fields[0].name.is_none() { if fields.get(0).name.is_none() {
// tuple struct/"normal" variant // tuple struct/"normal" variant
format_string.push_str("("); format_string.push_str("(");
@ -135,6 +136,6 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span,
// phew, not our responsibility any more! // phew, not our responsibility any more!
format::expand_preparsed_format_args(cx, span, format::expand_preparsed_format_args(cx, span,
format_closure, format_closure,
format_string, exprs, ~[], format_string, exprs, Vec::new(),
HashMap::new()) HashMap::new())
} }

View file

@ -21,6 +21,8 @@ use codemap::{Span,respan};
use opt_vec; use opt_vec;
use opt_vec::OptVec; use opt_vec::OptVec;
use std::vec_ng::Vec;
/// The types of pointers /// The types of pointers
pub enum PtrTy<'a> { pub enum PtrTy<'a> {
Send, // ~ Send, // ~
@ -188,10 +190,10 @@ impl<'a> Ty<'a> {
fn mk_ty_param(cx: &ExtCtxt, span: Span, name: &str, bounds: &[Path], fn mk_ty_param(cx: &ExtCtxt, span: Span, name: &str, bounds: &[Path],
self_ident: Ident, self_generics: &Generics) -> ast::TyParam { self_ident: Ident, self_generics: &Generics) -> ast::TyParam {
let bounds = opt_vec::from( let bounds = opt_vec::from(
bounds.map(|b| { bounds.iter().map(|b| {
let path = b.to_path(cx, span, self_ident, self_generics); let path = b.to_path(cx, span, self_ident, self_generics);
cx.typarambound(path) cx.typarambound(path)
})); }).collect());
cx.typaram(cx.ident_of(name), bounds, None) cx.typaram(cx.ident_of(name), bounds, None)
} }
@ -204,8 +206,8 @@ fn mk_generics(lifetimes: Vec<ast::Lifetime> , ty_params: Vec<ast::TyParam> ) -
/// Lifetimes and bounds on type parameters /// Lifetimes and bounds on type parameters
pub struct LifetimeBounds<'a> { pub struct LifetimeBounds<'a> {
lifetimes: Vec<&'a str> , lifetimes: Vec<&'a str>,
bounds: vec!((&'a str, Vec<Path<'a>> )) bounds: Vec<(&'a str, Vec<Path<'a>>)>,
} }
impl<'a> LifetimeBounds<'a> { impl<'a> LifetimeBounds<'a> {
@ -226,7 +228,12 @@ impl<'a> LifetimeBounds<'a> {
let ty_params = self.bounds.map(|t| { let ty_params = self.bounds.map(|t| {
match t { match t {
&(ref name, ref bounds) => { &(ref name, ref bounds) => {
mk_ty_param(cx, span, *name, *bounds, self_ty, self_generics) mk_ty_param(cx,
span,
*name,
bounds.as_slice(),
self_ty,
self_generics)
} }
} }
}); });

View file

@ -14,6 +14,8 @@ use ext::base::ExtCtxt;
use ext::build::AstBuilder; use ext::build::AstBuilder;
use ext::deriving::generic::*; use ext::deriving::generic::*;
use std::vec_ng::Vec;
pub fn expand_deriving_zero(cx: &mut ExtCtxt, pub fn expand_deriving_zero(cx: &mut ExtCtxt,
span: Span, span: Span,
mitem: @MetaItem, mitem: @MetaItem,

View file

@ -19,6 +19,7 @@ use codemap::Span;
use ext::base::*; use ext::base::*;
use ext::base; use ext::base;
use ext::build::AstBuilder; use ext::build::AstBuilder;
use opt_vec;
use parse::token; use parse::token;
use std::os; use std::os;
@ -31,8 +32,30 @@ pub fn expand_option_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
}; };
let e = match os::getenv(var) { let e = match os::getenv(var) {
None => quote_expr!(cx, ::std::option::None::<&'static str>), None => {
Some(s) => quote_expr!(cx, ::std::option::Some($s)) cx.expr_path(cx.path_all(sp,
true,
vec!(cx.ident_of("std"),
cx.ident_of("option"),
cx.ident_of("None")),
opt_vec::Empty,
vec!(cx.ty_rptr(sp,
cx.ty_ident(sp,
cx.ident_of("str")),
Some(cx.lifetime(sp,
cx.ident_of(
"static").name)),
ast::MutImmutable))))
}
Some(s) => {
cx.expr_call_global(sp,
vec!(cx.ident_of("std"),
cx.ident_of("option"),
cx.ident_of("Some")),
vec!(cx.expr_str(sp,
token::intern_and_get_ident(
s))))
}
}; };
MRExpr(e) MRExpr(e)
} }
@ -48,7 +71,9 @@ pub fn expand_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
Some(exprs) => exprs Some(exprs) => exprs
}; };
let var = match expr_to_str(cx, exprs[0], "expected string literal") { let var = match expr_to_str(cx,
*exprs.get(0),
"expected string literal") {
None => return MacResult::dummy_expr(sp), None => return MacResult::dummy_expr(sp),
Some((v, _style)) => v Some((v, _style)) => v
}; };
@ -59,7 +84,7 @@ pub fn expand_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
var)) var))
} }
2 => { 2 => {
match expr_to_str(cx, exprs[1], "expected string literal") { match expr_to_str(cx, *exprs.get(1), "expected string literal") {
None => return MacResult::dummy_expr(sp), None => return MacResult::dummy_expr(sp),
Some((s, _style)) => s Some((s, _style)) => s
} }

View file

@ -31,6 +31,7 @@ use util::small_vector::SmallVector;
use std::cast; use std::cast;
use std::unstable::dynamic_lib::DynamicLibrary; use std::unstable::dynamic_lib::DynamicLibrary;
use std::os; use std::os;
use std::vec_ng::Vec;
pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr { pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr {
match e.node { match e.node {
@ -53,7 +54,7 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr {
// let compilation continue // let compilation continue
return MacResult::raw_dummy_expr(e.span); return MacResult::raw_dummy_expr(e.span);
} }
let extname = pth.segments[0].identifier; let extname = pth.segments.get(0).identifier;
let extnamestr = token::get_ident(extname); let extnamestr = token::get_ident(extname);
// leaving explicit deref here to highlight unbox op: // leaving explicit deref here to highlight unbox op:
let marked_after = match fld.extsbox.find(&extname.name) { let marked_after = match fld.extsbox.find(&extname.name) {
@ -77,7 +78,7 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr {
}); });
let fm = fresh_mark(); let fm = fresh_mark();
// mark before: // mark before:
let marked_before = mark_tts(*tts,fm); let marked_before = mark_tts(tts.as_slice(), fm);
// The span that we pass to the expanders we want to // The span that we pass to the expanders we want to
// be the root of the call stack. That's the most // be the root of the call stack. That's the most
@ -87,7 +88,7 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr {
let expanded = match expandfun.expand(fld.cx, let expanded = match expandfun.expand(fld.cx,
mac_span.call_site, mac_span.call_site,
marked_before) { marked_before.as_slice()) {
MRExpr(e) => e, MRExpr(e) => e,
MRAny(any_macro) => any_macro.make_expr(), MRAny(any_macro) => any_macro.make_expr(),
_ => { _ => {
@ -181,7 +182,10 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr {
// `match i.next() { ... }` // `match i.next() { ... }`
let match_expr = { let match_expr = {
let next_call_expr = let next_call_expr =
fld.cx.expr_method_call(span, fld.cx.expr_path(local_path), next_ident, Vec::new()); fld.cx.expr_method_call(span,
fld.cx.expr_path(local_path),
next_ident,
Vec::new());
fld.cx.expr_match(span, next_call_expr, vec!(none_arm, some_arm)) fld.cx.expr_match(span, next_call_expr, vec!(none_arm, some_arm))
}; };
@ -276,7 +280,7 @@ pub fn expand_item(it: @ast::Item, fld: &mut MacroExpander)
ast::ItemMac(..) => expand_item_mac(it, fld), ast::ItemMac(..) => expand_item_mac(it, fld),
ast::ItemMod(_) | ast::ItemForeignMod(_) => { ast::ItemMod(_) | ast::ItemForeignMod(_) => {
fld.cx.mod_push(it.ident); fld.cx.mod_push(it.ident);
let macro_escape = contains_macro_escape(it.attrs); let macro_escape = contains_macro_escape(it.attrs.as_slice());
let result = with_exts_frame!(fld.extsbox, let result = with_exts_frame!(fld.extsbox,
macro_escape, macro_escape,
noop_fold_item(it, fld)); noop_fold_item(it, fld));
@ -309,7 +313,7 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander)
_ => fld.cx.span_bug(it.span, "invalid item macro invocation") _ => fld.cx.span_bug(it.span, "invalid item macro invocation")
}; };
let extname = pth.segments[0].identifier; let extname = pth.segments.get(0).identifier;
let extnamestr = token::get_ident(extname); let extnamestr = token::get_ident(extname);
let fm = fresh_mark(); let fm = fresh_mark();
let expanded = match fld.extsbox.find(&extname.name) { let expanded = match fld.extsbox.find(&extname.name) {
@ -339,8 +343,8 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander)
} }
}); });
// mark before expansion: // mark before expansion:
let marked_before = mark_tts(tts,fm); let marked_before = mark_tts(tts.as_slice(), fm);
expander.expand(fld.cx, it.span, marked_before) expander.expand(fld.cx, it.span, marked_before.as_slice())
} }
Some(&IdentTT(ref expander, span)) => { Some(&IdentTT(ref expander, span)) => {
if it.ident.name == parse::token::special_idents::invalid.name { if it.ident.name == parse::token::special_idents::invalid.name {
@ -358,7 +362,7 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander)
} }
}); });
// mark before expansion: // mark before expansion:
let marked_tts = mark_tts(tts,fm); let marked_tts = mark_tts(tts.as_slice(), fm);
expander.expand(fld.cx, it.span, it.ident, marked_tts) expander.expand(fld.cx, it.span, it.ident, marked_tts)
} }
_ => { _ => {
@ -391,7 +395,7 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander)
// yikes... no idea how to apply the mark to this. I'm afraid // yikes... no idea how to apply the mark to this. I'm afraid
// we're going to have to wait-and-see on this one. // we're going to have to wait-and-see on this one.
fld.extsbox.insert(intern(name), ext); fld.extsbox.insert(intern(name), ext);
if attr::contains_name(it.attrs, "macro_export") { if attr::contains_name(it.attrs.as_slice(), "macro_export") {
SmallVector::one(it) SmallVector::one(it)
} else { } else {
SmallVector::zero() SmallVector::zero()
@ -504,7 +508,7 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> {
fld.cx.span_err(pth.span, "expected macro name without module separators"); fld.cx.span_err(pth.span, "expected macro name without module separators");
return SmallVector::zero(); return SmallVector::zero();
} }
let extname = pth.segments[0].identifier; let extname = pth.segments.get(0).identifier;
let extnamestr = token::get_ident(extname); let extnamestr = token::get_ident(extname);
let marked_after = match fld.extsbox.find(&extname.name) { let marked_after = match fld.extsbox.find(&extname.name) {
None => { None => {
@ -523,7 +527,7 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> {
}); });
let fm = fresh_mark(); let fm = fresh_mark();
// mark before expansion: // mark before expansion:
let marked_tts = mark_tts(tts,fm); let marked_tts = mark_tts(tts.as_slice(), fm);
// See the comment in expand_expr for why we want the original span, // See the comment in expand_expr for why we want the original span,
// not the current mac.span. // not the current mac.span.
@ -531,7 +535,7 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> {
let expanded = match expandfun.expand(fld.cx, let expanded = match expandfun.expand(fld.cx,
mac_span.call_site, mac_span.call_site,
marked_tts) { marked_tts.as_slice()) {
MRExpr(e) => { MRExpr(e) => {
@codemap::Spanned { @codemap::Spanned {
node: StmtExpr(e, ast::DUMMY_NODE_ID), node: StmtExpr(e, ast::DUMMY_NODE_ID),
@ -676,7 +680,8 @@ impl Visitor<()> for NewNameFinderContext {
span: _, span: _,
segments: ref segments segments: ref segments
} if segments.len() == 1 => { } if segments.len() == 1 => {
self.ident_accumulator.push(segments[0].identifier) self.ident_accumulator.push(segments.get(0)
.identifier)
} }
// I believe these must be enums... // I believe these must be enums...
_ => () _ => ()
@ -843,7 +848,7 @@ impl Folder for Marker {
let macro = match m.node { let macro = match m.node {
MacInvocTT(ref path, ref tts, ctxt) => { MacInvocTT(ref path, ref tts, ctxt) => {
MacInvocTT(self.fold_path(path), MacInvocTT(self.fold_path(path),
fold_tts(*tts, self), fold_tts(tts.as_slice(), self),
new_mark(self.mark, ctxt)) new_mark(self.mark, ctxt))
} }
}; };
@ -912,6 +917,8 @@ mod test {
use visit; use visit;
use visit::Visitor; use visit::Visitor;
use std::vec_ng::Vec;
// a visitor that extracts the paths // a visitor that extracts the paths
// from a given thingy and puts them in a mutable // from a given thingy and puts them in a mutable
// array (passed in to the traversal) // array (passed in to the traversal)
@ -1015,9 +1022,9 @@ mod test {
let attr2 = make_dummy_attr ("bar"); let attr2 = make_dummy_attr ("bar");
let escape_attr = make_dummy_attr ("macro_escape"); let escape_attr = make_dummy_attr ("macro_escape");
let attrs1 = vec!(attr1, escape_attr, attr2); let attrs1 = vec!(attr1, escape_attr, attr2);
assert_eq!(contains_macro_escape (attrs1),true); assert_eq!(contains_macro_escape(attrs1.as_slice()),true);
let attrs2 = vec!(attr1,attr2); let attrs2 = vec!(attr1,attr2);
assert_eq!(contains_macro_escape (attrs2),false); assert_eq!(contains_macro_escape(attrs2.as_slice()),false);
} }
// make a MetaWord outer attribute with the given name // make a MetaWord outer attribute with the given name
@ -1082,7 +1089,7 @@ mod test {
// in principle, you might want to control this boolean on a per-varref basis, // in principle, you might want to control this boolean on a per-varref basis,
// but that would make things even harder to understand, and might not be // but that would make things even harder to understand, and might not be
// necessary for thorough testing. // necessary for thorough testing.
type RenamingTest = (&'static str, vec!(Vec<uint> ), bool); type RenamingTest = (&'static str, Vec<Vec<uint>>, bool);
#[test] #[test]
fn automatic_renaming () { fn automatic_renaming () {
@ -1131,8 +1138,8 @@ mod test {
// must be one check clause for each binding: // must be one check clause for each binding:
assert_eq!(bindings.len(),bound_connections.len()); assert_eq!(bindings.len(),bound_connections.len());
for (binding_idx,shouldmatch) in bound_connections.iter().enumerate() { for (binding_idx,shouldmatch) in bound_connections.iter().enumerate() {
let binding_name = mtwt_resolve(bindings[binding_idx]); let binding_name = mtwt_resolve(*bindings.get(binding_idx));
let binding_marks = mtwt_marksof(bindings[binding_idx].ctxt,invalid_name); let binding_marks = mtwt_marksof(bindings.get(binding_idx).ctxt,invalid_name);
// shouldmatch can't name varrefs that don't exist: // shouldmatch can't name varrefs that don't exist:
assert!((shouldmatch.len() == 0) || assert!((shouldmatch.len() == 0) ||
(varrefs.len() > *shouldmatch.iter().max().unwrap())); (varrefs.len() > *shouldmatch.iter().max().unwrap()));
@ -1141,13 +1148,18 @@ mod test {
// it should be a path of length 1, and it should // it should be a path of length 1, and it should
// be free-identifier=? or bound-identifier=? to the given binding // be free-identifier=? or bound-identifier=? to the given binding
assert_eq!(varref.segments.len(),1); assert_eq!(varref.segments.len(),1);
let varref_name = mtwt_resolve(varref.segments[0].identifier); let varref_name = mtwt_resolve(varref.segments
let varref_marks = mtwt_marksof(varref.segments[0].identifier.ctxt, .get(0)
.identifier);
let varref_marks = mtwt_marksof(varref.segments
.get(0)
.identifier
.ctxt,
invalid_name); invalid_name);
if !(varref_name==binding_name) { if !(varref_name==binding_name) {
println!("uh oh, should match but doesn't:"); println!("uh oh, should match but doesn't:");
println!("varref: {:?}",varref); println!("varref: {:?}",varref);
println!("binding: {:?}", bindings[binding_idx]); println!("binding: {:?}", *bindings.get(binding_idx));
ast_util::display_sctable(get_sctable()); ast_util::display_sctable(get_sctable());
} }
assert_eq!(varref_name,binding_name); assert_eq!(varref_name,binding_name);
@ -1158,7 +1170,8 @@ mod test {
} }
} else { } else {
let fail = (varref.segments.len() == 1) let fail = (varref.segments.len() == 1)
&& (mtwt_resolve(varref.segments[0].identifier) == binding_name); && (mtwt_resolve(varref.segments.get(0).identifier) ==
binding_name);
// temp debugging: // temp debugging:
if fail { if fail {
println!("failure on test {}",test_idx); println!("failure on test {}",test_idx);
@ -1167,11 +1180,13 @@ mod test {
println!("uh oh, matches but shouldn't:"); println!("uh oh, matches but shouldn't:");
println!("varref: {:?}",varref); println!("varref: {:?}",varref);
// good lord, you can't make a path with 0 segments, can you? // good lord, you can't make a path with 0 segments, can you?
let string = token::get_ident(varref.segments[0].identifier); let string = token::get_ident(varref.segments
.get(0)
.identifier);
println!("varref's first segment's uint: {}, and string: \"{}\"", println!("varref's first segment's uint: {}, and string: \"{}\"",
varref.segments[0].identifier.name, varref.segments.get(0).identifier.name,
string.get()); string.get());
println!("binding: {:?}", bindings[binding_idx]); println!("binding: {:?}", *bindings.get(binding_idx));
ast_util::display_sctable(get_sctable()); ast_util::display_sctable(get_sctable());
} }
assert!(!fail); assert!(!fail);
@ -1197,7 +1212,7 @@ foo_module!()
let string = ident.get(); let string = ident.get();
"xx" == string "xx" == string
}).collect(); }).collect();
let cxbinds: &[&ast::Ident] = cxbinds; let cxbinds: &[&ast::Ident] = cxbinds.as_slice();
let cxbind = match cxbinds { let cxbind = match cxbinds {
[b] => b, [b] => b,
_ => fail!("expected just one binding for ext_cx") _ => fail!("expected just one binding for ext_cx")
@ -1211,16 +1226,17 @@ foo_module!()
// the xx binding should bind all of the xx varrefs: // the xx binding should bind all of the xx varrefs:
for (idx,v) in varrefs.iter().filter(|p| { for (idx,v) in varrefs.iter().filter(|p| {
p.segments.len() == 1 p.segments.len() == 1
&& "xx" == token::get_ident(p.segments[0].identifier).get() && "xx" == token::get_ident(p.segments.get(0).identifier).get()
}).enumerate() { }).enumerate() {
if mtwt_resolve(v.segments[0].identifier) != resolved_binding { if mtwt_resolve(v.segments.get(0).identifier) !=
resolved_binding {
println!("uh oh, xx binding didn't match xx varref:"); println!("uh oh, xx binding didn't match xx varref:");
println!("this is xx varref \\# {:?}",idx); println!("this is xx varref \\# {:?}",idx);
println!("binding: {:?}",cxbind); println!("binding: {:?}",cxbind);
println!("resolves to: {:?}",resolved_binding); println!("resolves to: {:?}",resolved_binding);
println!("varref: {:?}",v.segments[0].identifier); println!("varref: {:?}",v.segments.get(0).identifier);
println!("resolves to: {:?}", println!("resolves to: {:?}",
mtwt_resolve(v.segments[0].identifier)); mtwt_resolve(v.segments.get(0).identifier));
let table = get_sctable(); let table = get_sctable();
println!("SC table:"); println!("SC table:");
@ -1231,7 +1247,8 @@ foo_module!()
} }
} }
} }
assert_eq!(mtwt_resolve(v.segments[0].identifier),resolved_binding); assert_eq!(mtwt_resolve(v.segments.get(0).identifier),
resolved_binding);
}; };
} }

View file

@ -22,6 +22,7 @@ use rsparse = parse;
use std::fmt::parse; use std::fmt::parse;
use collections::{HashMap, HashSet}; use collections::{HashMap, HashSet};
use std::vec; use std::vec;
use std::vec_ng::Vec;
#[deriving(Eq)] #[deriving(Eq)]
enum ArgumentType { enum ArgumentType {
@ -49,7 +50,7 @@ struct Context<'a> {
// were declared in. // were declared in.
names: HashMap<~str, @ast::Expr>, names: HashMap<~str, @ast::Expr>,
name_types: HashMap<~str, ArgumentType>, name_types: HashMap<~str, ArgumentType>,
name_ordering: ~[~str], name_ordering: Vec<~str>,
// Collection of the compiled `rt::Piece` structures // Collection of the compiled `rt::Piece` structures
pieces: Vec<@ast::Expr> , pieces: Vec<@ast::Expr> ,
@ -70,15 +71,17 @@ struct Context<'a> {
/// Some((fmtstr, unnamed arguments, ordering of named arguments, /// Some((fmtstr, unnamed arguments, ordering of named arguments,
/// named arguments)) /// named arguments))
fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> (@ast::Expr, Option<(@ast::Expr, Vec<@ast::Expr>, ~[~str], -> (@ast::Expr, Option<(@ast::Expr, Vec<@ast::Expr>, Vec<~str>,
HashMap<~str, @ast::Expr>)>) { HashMap<~str, @ast::Expr>)>) {
let mut args = Vec::new(); let mut args = Vec::new();
let mut names = HashMap::<~str, @ast::Expr>::new(); let mut names = HashMap::<~str, @ast::Expr>::new();
let mut order = ~[]; let mut order = Vec::new();
let mut p = rsparse::new_parser_from_tts(ecx.parse_sess(), let mut p = rsparse::new_parser_from_tts(ecx.parse_sess(),
ecx.cfg(), ecx.cfg(),
tts.to_owned()); tts.iter()
.map(|x| (*x).clone())
.collect());
// Parse the leading function expression (maybe a block, maybe a path) // Parse the leading function expression (maybe a block, maybe a path)
let extra = p.parse_expr(); let extra = p.parse_expr();
if !p.eat(&token::COMMA) { if !p.eat(&token::COMMA) {
@ -275,14 +278,14 @@ impl<'a> Context<'a> {
return; return;
} }
{ {
let arg_type = match self.arg_types[arg] { let arg_type = match self.arg_types.get(arg) {
None => None, &None => None,
Some(ref x) => Some(x) &Some(ref x) => Some(x)
}; };
self.verify_same(self.args[arg].span, &ty, arg_type); self.verify_same(self.args.get(arg).span, &ty, arg_type);
} }
if self.arg_types[arg].is_none() { if self.arg_types.get(arg).is_none() {
self.arg_types[arg] = Some(ty); *self.arg_types.get_mut(arg) = Some(ty);
} }
} }
@ -653,7 +656,9 @@ impl<'a> Context<'a> {
// of each variable because we don't want to move out of the arguments // of each variable because we don't want to move out of the arguments
// passed to this function. // passed to this function.
for (i, &e) in self.args.iter().enumerate() { for (i, &e) in self.args.iter().enumerate() {
if self.arg_types[i].is_none() { continue } // error already generated if self.arg_types.get(i).is_none() {
continue // error already generated
}
let name = self.ecx.ident_of(format!("__arg{}", i)); let name = self.ecx.ident_of(format!("__arg{}", i));
pats.push(self.ecx.pat_ident(e.span, name)); pats.push(self.ecx.pat_ident(e.span, name));
@ -748,7 +753,7 @@ impl<'a> Context<'a> {
fn format_arg(&self, sp: Span, argno: Position, arg: @ast::Expr) fn format_arg(&self, sp: Span, argno: Position, arg: @ast::Expr)
-> @ast::Expr { -> @ast::Expr {
let ty = match argno { let ty = match argno {
Exact(ref i) => self.arg_types[*i].get_ref(), Exact(ref i) => self.arg_types.get(*i).get_ref(),
Named(ref s) => self.name_types.get(s) Named(ref s) => self.name_types.get(s)
}; };
@ -822,7 +827,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span,
efmt: @ast::Expr, args: Vec<@ast::Expr>, efmt: @ast::Expr, args: Vec<@ast::Expr>,
name_ordering: Vec<~str>, name_ordering: Vec<~str>,
names: HashMap<~str, @ast::Expr>) -> @ast::Expr { names: HashMap<~str, @ast::Expr>) -> @ast::Expr {
let arg_types = vec::from_fn(args.len(), |_| None); let arg_types = Vec::from_fn(args.len(), |_| None);
let mut cx = Context { let mut cx = Context {
ecx: ecx, ecx: ecx,
args: args, args: args,
@ -871,7 +876,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span,
// Make sure that all arguments were used and all arguments have types. // Make sure that all arguments were used and all arguments have types.
for (i, ty) in cx.arg_types.iter().enumerate() { for (i, ty) in cx.arg_types.iter().enumerate() {
if ty.is_none() { if ty.is_none() {
cx.ecx.span_err(cx.args[i].span, "argument never used"); cx.ecx.span_err(cx.args.get(i).span, "argument never used");
} }
} }
for (name, e) in cx.names.iter() { for (name, e) in cx.names.iter() {

View file

@ -20,7 +20,8 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt,
-> base::MacResult { -> base::MacResult {
cx.print_backtrace(); cx.print_backtrace();
println!("{}", print::pprust::tt_to_str(&ast::TTDelim(@tt.to_owned()))); println!("{}", print::pprust::tt_to_str(&ast::TTDelim(
@tt.iter().map(|x| (*x).clone()).collect())));
//trivial expression //trivial expression
MRExpr(@ast::Expr { MRExpr(@ast::Expr {

View file

@ -17,6 +17,8 @@ use parse::token::*;
use parse::token; use parse::token;
use parse; use parse;
use std::vec_ng::Vec;
/** /**
* *
* Quasiquoting works via token trees. * Quasiquoting works via token trees.
@ -35,6 +37,8 @@ pub mod rt {
use parse; use parse;
use print::pprust; use print::pprust;
use std::vec_ng::Vec;
pub use ast::*; pub use ast::*;
pub use parse::token::*; pub use parse::token::*;
pub use parse::new_parser_from_tts; pub use parse::new_parser_from_tts;
@ -305,7 +309,7 @@ pub fn expand_quote_expr(cx: &mut ExtCtxt,
pub fn expand_quote_item(cx: &mut ExtCtxt, pub fn expand_quote_item(cx: &mut ExtCtxt,
sp: Span, sp: Span,
tts: &[ast::TokenTree]) -> base::MacResult { tts: &[ast::TokenTree]) -> base::MacResult {
let e_attrs = cx.expr_vec_uniq(sp, Vec::new()); let e_attrs = cx.expr_vec_ng(sp);
let expanded = expand_parse_call(cx, sp, "parse_item", let expanded = expand_parse_call(cx, sp, "parse_item",
vec!(e_attrs), tts); vec!(e_attrs), tts);
base::MRExpr(expanded) base::MRExpr(expanded)
@ -332,7 +336,7 @@ pub fn expand_quote_ty(cx: &mut ExtCtxt,
pub fn expand_quote_stmt(cx: &mut ExtCtxt, pub fn expand_quote_stmt(cx: &mut ExtCtxt,
sp: Span, sp: Span,
tts: &[ast::TokenTree]) -> base::MacResult { tts: &[ast::TokenTree]) -> base::MacResult {
let e_attrs = cx.expr_vec_uniq(sp, Vec::new()); let e_attrs = cx.expr_vec_ng(sp);
let expanded = expand_parse_call(cx, sp, "parse_stmt", let expanded = expand_parse_call(cx, sp, "parse_stmt",
vec!(e_attrs), tts); vec!(e_attrs), tts);
base::MRExpr(expanded) base::MRExpr(expanded)
@ -540,7 +544,7 @@ fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> Vec<@ast::Stmt> {
vec!(cx.stmt_expr(e_push)) vec!(cx.stmt_expr(e_push))
} }
ast::TTDelim(ref tts) => mk_tts(cx, sp, **tts), ast::TTDelim(ref tts) => mk_tts(cx, sp, tts.as_slice()),
ast::TTSeq(..) => fail!("TTSeq in quote!"), ast::TTSeq(..) => fail!("TTSeq in quote!"),
ast::TTNonterminal(sp, ident) => { ast::TTNonterminal(sp, ident) => {
@ -583,7 +587,9 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
let mut p = parse::new_parser_from_tts(cx.parse_sess(), let mut p = parse::new_parser_from_tts(cx.parse_sess(),
cx.cfg(), cx.cfg(),
tts.to_owned()); tts.iter()
.map(|x| (*x).clone())
.collect());
p.quote_depth += 1u; p.quote_depth += 1u;
let cx_expr = p.parse_expr(); let cx_expr = p.parse_expr();
@ -629,14 +635,14 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
id_ext("_sp"), id_ext("_sp"),
e_sp); e_sp);
let stmt_let_tt = cx.stmt_let(sp, true, let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp));
id_ext("tt"),
cx.expr_vec_uniq(sp, Vec::new()));
let mut vector = vec!(stmt_let_sp, stmt_let_tt);
vector.push_all_move(mk_tts(cx, sp, tts.as_slice()));
let block = cx.expr_block( let block = cx.expr_block(
cx.block_all(sp, cx.block_all(sp,
Vec::new(), Vec::new(),
vec!(stmt_let_sp, stmt_let_tt) + mk_tts(cx, sp, tts), vector,
Some(cx.expr_ident(sp, id_ext("tt"))))); Some(cx.expr_ident(sp, id_ext("tt")))));
(cx_expr, block) (cx_expr, block)

View file

@ -15,6 +15,8 @@ use diagnostic;
use visit; use visit;
use visit::Visitor; use visit::Visitor;
use std::vec_ng::Vec;
struct MacroRegistrarContext { struct MacroRegistrarContext {
registrars: Vec<(ast::NodeId, Span)> , registrars: Vec<(ast::NodeId, Span)> ,
} }
@ -23,7 +25,8 @@ impl Visitor<()> for MacroRegistrarContext {
fn visit_item(&mut self, item: &ast::Item, _: ()) { fn visit_item(&mut self, item: &ast::Item, _: ()) {
match item.node { match item.node {
ast::ItemFn(..) => { ast::ItemFn(..) => {
if attr::contains_name(item.attrs, "macro_registrar") { if attr::contains_name(item.attrs.as_slice(),
"macro_registrar") {
self.registrars.push((item.id, item.span)); self.registrars.push((item.id, item.span));
} }
} }

View file

@ -142,6 +142,7 @@ pub fn expand_include_bin(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
return MacResult::dummy_expr(sp); return MacResult::dummy_expr(sp);
} }
Ok(bytes) => { Ok(bytes) => {
let bytes = bytes.iter().map(|x| *x).collect();
base::MRExpr(cx.expr_lit(sp, ast::LitBinary(Rc::new(bytes)))) base::MRExpr(cx.expr_lit(sp, ast::LitBinary(Rc::new(bytes))))
} }
} }

View file

@ -24,7 +24,7 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt,
let cfg = cx.cfg(); let cfg = cx.cfg();
let tt_rdr = new_tt_reader(cx.parse_sess().span_diagnostic, let tt_rdr = new_tt_reader(cx.parse_sess().span_diagnostic,
None, None,
tt.to_owned()); tt.iter().map(|x| (*x).clone()).collect());
let mut rust_parser = Parser(sess, cfg.clone(), tt_rdr.dup()); let mut rust_parser = Parser(sess, cfg.clone(), tt_rdr.dup());
if rust_parser.is_keyword(keywords::True) { if rust_parser.is_keyword(keywords::True) {

View file

@ -22,7 +22,7 @@ use parse::token::{Token, EOF, Nonterminal};
use parse::token; use parse::token;
use collections::HashMap; use collections::HashMap;
use std::vec; use std::vec_ng::Vec;
/* This is an Earley-like parser, without support for in-grammar nonterminals, /* This is an Earley-like parser, without support for in-grammar nonterminals,
only by calling out to the main rust parser for named nonterminals (which it only by calling out to the main rust parser for named nonterminals (which it
@ -103,7 +103,7 @@ pub struct MatcherPos {
sep: Option<Token>, sep: Option<Token>,
idx: uint, idx: uint,
up: Option<~MatcherPos>, up: Option<~MatcherPos>,
matches: vec!(Vec<@NamedMatch> ), matches: Vec<Vec<@NamedMatch>>,
match_lo: uint, match_hi: uint, match_lo: uint, match_hi: uint,
sp_lo: BytePos, sp_lo: BytePos,
} }
@ -112,7 +112,9 @@ pub fn count_names(ms: &[Matcher]) -> uint {
ms.iter().fold(0, |ct, m| { ms.iter().fold(0, |ct, m| {
ct + match m.node { ct + match m.node {
MatchTok(_) => 0u, MatchTok(_) => 0u,
MatchSeq(ref more_ms, _, _, _, _) => count_names((*more_ms)), MatchSeq(ref more_ms, _, _, _, _) => {
count_names(more_ms.as_slice())
}
MatchNonterminal(_, _, _) => 1u MatchNonterminal(_, _, _) => 1u
}}) }})
} }
@ -131,7 +133,7 @@ pub fn initial_matcher_pos(ms: Vec<Matcher> , sep: Option<Token>, lo: BytePos)
} }
} }
} }
let matches = vec::from_fn(count_names(ms), |_i| Vec::new()); let matches = Vec::from_fn(count_names(ms.as_slice()), |_i| Vec::new());
~MatcherPos { ~MatcherPos {
elts: ms, elts: ms,
sep: sep, sep: sep,
@ -208,7 +210,7 @@ pub fn parse_or_else<R: Reader>(sess: @ParseSess,
rdr: R, rdr: R,
ms: Vec<Matcher> ) ms: Vec<Matcher> )
-> HashMap<Ident, @NamedMatch> { -> HashMap<Ident, @NamedMatch> {
match parse(sess, cfg, rdr, ms) { match parse(sess, cfg, rdr, ms.as_slice()) {
Success(m) => m, Success(m) => m,
Failure(sp, str) => sess.span_diagnostic.span_fatal(sp, str), Failure(sp, str) => sess.span_diagnostic.span_fatal(sp, str),
Error(sp, str) => sess.span_diagnostic.span_fatal(sp, str) Error(sp, str) => sess.span_diagnostic.span_fatal(sp, str)
@ -231,7 +233,11 @@ pub fn parse<R: Reader>(sess: @ParseSess,
ms: &[Matcher]) ms: &[Matcher])
-> ParseResult { -> ParseResult {
let mut cur_eis = Vec::new(); let mut cur_eis = Vec::new();
cur_eis.push(initial_matcher_pos(ms.to_owned(), None, rdr.peek().sp.lo)); cur_eis.push(initial_matcher_pos(ms.iter()
.map(|x| (*x).clone())
.collect(),
None,
rdr.peek().sp.lo));
loop { loop {
let mut bb_eis = Vec::new(); // black-box parsed by parser.rs let mut bb_eis = Vec::new(); // black-box parsed by parser.rs
@ -274,8 +280,9 @@ pub fn parse<R: Reader>(sess: @ParseSess,
// Only touch the binders we have actually bound // Only touch the binders we have actually bound
for idx in range(ei.match_lo, ei.match_hi) { for idx in range(ei.match_lo, ei.match_hi) {
let sub = ei.matches[idx].clone(); let sub = (*ei.matches.get(idx)).clone();
new_pos.matches[idx] new_pos.matches
.get_mut(idx)
.push(@MatchedSeq(sub, mk_sp(ei.sp_lo, .push(@MatchedSeq(sub, mk_sp(ei.sp_lo,
sp.hi))); sp.hi)));
} }
@ -308,7 +315,7 @@ pub fn parse<R: Reader>(sess: @ParseSess,
eof_eis.push(ei); eof_eis.push(ei);
} }
} else { } else {
match ei.elts[idx].node.clone() { match ei.elts.get(idx).node.clone() {
/* need to descend into sequence */ /* need to descend into sequence */
MatchSeq(ref matchers, ref sep, zero_ok, MatchSeq(ref matchers, ref sep, zero_ok,
match_idx_lo, match_idx_hi) => { match_idx_lo, match_idx_hi) => {
@ -317,13 +324,15 @@ pub fn parse<R: Reader>(sess: @ParseSess,
new_ei.idx += 1u; new_ei.idx += 1u;
//we specifically matched zero repeats. //we specifically matched zero repeats.
for idx in range(match_idx_lo, match_idx_hi) { for idx in range(match_idx_lo, match_idx_hi) {
new_ei.matches[idx].push(@MatchedSeq(Vec::new(), sp)); new_ei.matches
.get_mut(idx)
.push(@MatchedSeq(Vec::new(), sp));
} }
cur_eis.push(new_ei); cur_eis.push(new_ei);
} }
let matches = vec::from_elem(ei.matches.len(), Vec::new()); let matches = Vec::from_elem(ei.matches.len(), Vec::new());
let ei_t = ei; let ei_t = ei;
cur_eis.push(~MatcherPos { cur_eis.push(~MatcherPos {
elts: (*matchers).clone(), elts: (*matchers).clone(),
@ -352,10 +361,10 @@ pub fn parse<R: Reader>(sess: @ParseSess,
if token_name_eq(&tok, &EOF) { if token_name_eq(&tok, &EOF) {
if eof_eis.len() == 1u { if eof_eis.len() == 1u {
let mut v = Vec::new(); let mut v = Vec::new();
for dv in eof_eis[0u].matches.mut_iter() { for dv in eof_eis.get_mut(0).matches.mut_iter() {
v.push(dv.pop().unwrap()); v.push(dv.pop().unwrap());
} }
return Success(nameize(sess, ms, v)); return Success(nameize(sess, ms, v.as_slice()));
} else if eof_eis.len() > 1u { } else if eof_eis.len() > 1u {
return Error(sp, ~"ambiguity: multiple successful parses"); return Error(sp, ~"ambiguity: multiple successful parses");
} else { } else {
@ -365,7 +374,7 @@ pub fn parse<R: Reader>(sess: @ParseSess,
if (bb_eis.len() > 0u && next_eis.len() > 0u) if (bb_eis.len() > 0u && next_eis.len() > 0u)
|| bb_eis.len() > 1u { || bb_eis.len() > 1u {
let nts = bb_eis.map(|ei| { let nts = bb_eis.map(|ei| {
match ei.elts[ei.idx].node { match ei.elts.get(ei.idx).node {
MatchNonterminal(bind, name, _) => { MatchNonterminal(bind, name, _) => {
format!("{} ('{}')", format!("{} ('{}')",
token::get_ident(name), token::get_ident(name),
@ -390,10 +399,10 @@ pub fn parse<R: Reader>(sess: @ParseSess,
let mut rust_parser = Parser(sess, cfg.clone(), rdr.dup()); let mut rust_parser = Parser(sess, cfg.clone(), rdr.dup());
let mut ei = bb_eis.pop().unwrap(); let mut ei = bb_eis.pop().unwrap();
match ei.elts[ei.idx].node { match ei.elts.get(ei.idx).node {
MatchNonterminal(_, name, idx) => { MatchNonterminal(_, name, idx) => {
let name_string = token::get_ident(name); let name_string = token::get_ident(name);
ei.matches[idx].push(@MatchedNonterminal( ei.matches.get_mut(idx).push(@MatchedNonterminal(
parse_nt(&mut rust_parser, name_string.get()))); parse_nt(&mut rust_parser, name_string.get())));
ei.idx += 1u; ei.idx += 1u;
} }

View file

@ -25,9 +25,11 @@ use parse::token::{special_idents, gensym_ident};
use parse::token::{FAT_ARROW, SEMI, NtMatchers, NtTT, EOF}; use parse::token::{FAT_ARROW, SEMI, NtMatchers, NtTT, EOF};
use parse::token; use parse::token;
use print; use print;
use std::cell::RefCell;
use util::small_vector::SmallVector; use util::small_vector::SmallVector;
use std::cell::RefCell;
use std::vec_ng::Vec;
struct ParserAnyMacro { struct ParserAnyMacro {
parser: RefCell<Parser>, parser: RefCell<Parser>,
} }
@ -100,7 +102,12 @@ impl MacroExpander for MacroRulesMacroExpander {
sp: Span, sp: Span,
arg: &[ast::TokenTree]) arg: &[ast::TokenTree])
-> MacResult { -> MacResult {
generic_extension(cx, sp, self.name, arg, *self.lhses, *self.rhses) generic_extension(cx,
sp,
self.name,
arg,
self.lhses.as_slice(),
self.rhses.as_slice())
} }
} }
@ -115,7 +122,9 @@ fn generic_extension(cx: &ExtCtxt,
if cx.trace_macros() { if cx.trace_macros() {
println!("{}! \\{ {} \\}", println!("{}! \\{ {} \\}",
token::get_ident(name), token::get_ident(name),
print::pprust::tt_to_str(&TTDelim(@arg.to_owned()))); print::pprust::tt_to_str(&TTDelim(@arg.iter()
.map(|x| (*x).clone())
.collect())));
} }
// Which arm's failure should we report? (the one furthest along) // Which arm's failure should we report? (the one furthest along)
@ -128,8 +137,12 @@ fn generic_extension(cx: &ExtCtxt,
match **lhs { match **lhs {
MatchedNonterminal(NtMatchers(ref mtcs)) => { MatchedNonterminal(NtMatchers(ref mtcs)) => {
// `None` is because we're not interpolating // `None` is because we're not interpolating
let arg_rdr = new_tt_reader(s_d, None, arg.to_owned()); let arg_rdr = new_tt_reader(s_d,
match parse(cx.parse_sess(), cx.cfg(), arg_rdr, *mtcs) { None,
arg.iter()
.map(|x| (*x).clone())
.collect());
match parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtcs.as_slice()) {
Success(named_matches) => { Success(named_matches) => {
let rhs = match *rhses[i] { let rhs = match *rhses[i] {
// okay, what's your transcriber? // okay, what's your transcriber?
@ -137,7 +150,10 @@ fn generic_extension(cx: &ExtCtxt,
match *tt { match *tt {
// cut off delimiters; don't parse 'em // cut off delimiters; don't parse 'em
TTDelim(ref tts) => { TTDelim(ref tts) => {
(*tts).slice(1u,(*tts).len()-1u).to_owned() (*tts).slice(1u,(*tts).len()-1u)
.iter()
.map(|x| (*x).clone())
.collect()
} }
_ => cx.span_fatal( _ => cx.span_fatal(
sp, "macro rhs must be delimited") sp, "macro rhs must be delimited")

View file

@ -18,6 +18,7 @@ use parse::token;
use parse::lexer::TokenAndSpan; use parse::lexer::TokenAndSpan;
use std::cell::{Cell, RefCell}; use std::cell::{Cell, RefCell};
use std::vec_ng::Vec;
use collections::HashMap; use collections::HashMap;
///an unzipping of `TokenTree`s ///an unzipping of `TokenTree`s
@ -106,7 +107,7 @@ fn lookup_cur_matched_by_matched(r: &TtReader, start: @NamedMatch)
// end of the line; duplicate henceforth // end of the line; duplicate henceforth
ad ad
} }
MatchedSeq(ref ads, _) => ads[*idx] MatchedSeq(ref ads, _) => *ads.get(*idx)
} }
} }
let repeat_idx = r.repeat_idx.borrow(); let repeat_idx = r.repeat_idx.borrow();
@ -217,7 +218,8 @@ pub fn tt_next_token(r: &TtReader) -> TokenAndSpan {
r.stack.get().idx.set(0u); r.stack.get().idx.set(0u);
{ {
let mut repeat_idx = r.repeat_idx.borrow_mut(); let mut repeat_idx = r.repeat_idx.borrow_mut();
repeat_idx.get()[repeat_idx.get().len() - 1u] += 1u; let last_repeat_idx = repeat_idx.get().len() - 1u;
*repeat_idx.get().get_mut(last_repeat_idx) += 1u;
} }
match r.stack.get().sep.clone() { match r.stack.get().sep.clone() {
Some(tk) => { Some(tk) => {
@ -231,7 +233,7 @@ pub fn tt_next_token(r: &TtReader) -> TokenAndSpan {
loop { /* because it's easiest, this handles `TTDelim` not starting loop { /* because it's easiest, this handles `TTDelim` not starting
with a `TTTok`, even though it won't happen */ with a `TTTok`, even though it won't happen */
// FIXME(pcwalton): Bad copy. // FIXME(pcwalton): Bad copy.
match r.stack.get().forest[r.stack.get().idx.get()].clone() { match (*r.stack.get().forest.get(r.stack.get().idx.get())).clone() {
TTDelim(tts) => { TTDelim(tts) => {
r.stack.set(@TtFrame { r.stack.set(@TtFrame {
forest: tts, forest: tts,

View file

@ -16,6 +16,8 @@ use parse::token;
use opt_vec::OptVec; use opt_vec::OptVec;
use util::small_vector::SmallVector; use util::small_vector::SmallVector;
use std::vec_ng::Vec;
// We may eventually want to be able to fold over type parameters, too. // We may eventually want to be able to fold over type parameters, too.
pub trait Folder { pub trait Folder {
fn fold_crate(&mut self, c: Crate) -> Crate { fn fold_crate(&mut self, c: Crate) -> Crate {
@ -23,11 +25,11 @@ pub trait Folder {
} }
fn fold_meta_items(&mut self, meta_items: &[@MetaItem]) -> Vec<@MetaItem> { fn fold_meta_items(&mut self, meta_items: &[@MetaItem]) -> Vec<@MetaItem> {
meta_items.map(|x| fold_meta_item_(*x, self)) meta_items.iter().map(|x| fold_meta_item_(*x, self)).collect()
} }
fn fold_view_paths(&mut self, view_paths: &[@ViewPath]) -> Vec<@ViewPath> { fn fold_view_paths(&mut self, view_paths: &[@ViewPath]) -> Vec<@ViewPath> {
view_paths.map(|view_path| { view_paths.iter().map(|view_path| {
let inner_view_path = match view_path.node { let inner_view_path = match view_path.node {
ViewPathSimple(ref ident, ref path, node_id) => { ViewPathSimple(ref ident, ref path, node_id) => {
ViewPathSimple(ident.clone(), ViewPathSimple(ident.clone(),
@ -60,7 +62,7 @@ pub trait Folder {
node: inner_view_path, node: inner_view_path,
span: self.new_span(view_path.span), span: self.new_span(view_path.span),
} }
}) }).collect()
} }
fn fold_view_item(&mut self, vi: &ViewItem) -> ViewItem { fn fold_view_item(&mut self, vi: &ViewItem) -> ViewItem {
@ -275,7 +277,7 @@ pub trait Folder {
node: match macro.node { node: match macro.node {
MacInvocTT(ref p, ref tts, ctxt) => { MacInvocTT(ref p, ref tts, ctxt) => {
MacInvocTT(self.fold_path(p), MacInvocTT(self.fold_path(p),
fold_tts(*tts, self), fold_tts(tts.as_slice(), self),
ctxt) ctxt)
} }
}, },
@ -284,7 +286,7 @@ pub trait Folder {
} }
fn map_exprs(&self, f: |@Expr| -> @Expr, es: &[@Expr]) -> Vec<@Expr> { fn map_exprs(&self, f: |@Expr| -> @Expr, es: &[@Expr]) -> Vec<@Expr> {
es.map(|x| f(*x)) es.iter().map(|x| f(*x)).collect()
} }
fn new_id(&mut self, i: NodeId) -> NodeId { fn new_id(&mut self, i: NodeId) -> NodeId {
@ -371,20 +373,20 @@ fn fold_arg_<T: Folder>(a: &Arg, fld: &mut T) -> Arg {
// token::LIFETIME are certainly not loop labels. But we can't tell in their // token::LIFETIME are certainly not loop labels. But we can't tell in their
// token form. So this is less ideal and hacky but it works. // token form. So this is less ideal and hacky but it works.
pub fn fold_tts<T: Folder>(tts: &[TokenTree], fld: &mut T) -> Vec<TokenTree> { pub fn fold_tts<T: Folder>(tts: &[TokenTree], fld: &mut T) -> Vec<TokenTree> {
tts.map(|tt| { tts.iter().map(|tt| {
match *tt { match *tt {
TTTok(span, ref tok) => TTTok(span, ref tok) =>
TTTok(span,maybe_fold_ident(tok,fld)), TTTok(span,maybe_fold_ident(tok,fld)),
TTDelim(tts) => TTDelim(@fold_tts(*tts, fld)), TTDelim(tts) => TTDelim(@fold_tts(tts.as_slice(), fld)),
TTSeq(span, pattern, ref sep, is_optional) => TTSeq(span, pattern, ref sep, is_optional) =>
TTSeq(span, TTSeq(span,
@fold_tts(*pattern, fld), @fold_tts(pattern.as_slice(), fld),
sep.as_ref().map(|tok|maybe_fold_ident(tok,fld)), sep.as_ref().map(|tok|maybe_fold_ident(tok,fld)),
is_optional), is_optional),
TTNonterminal(sp,ref ident) => TTNonterminal(sp,ref ident) =>
TTNonterminal(sp,fld.fold_ident(*ident)) TTNonterminal(sp,fld.fold_ident(*ident))
} }
}) }).collect()
} }
// apply ident folder if it's an ident, otherwise leave it alone // apply ident folder if it's an ident, otherwise leave it alone
@ -518,7 +520,7 @@ pub fn noop_fold_view_item<T: Folder>(vi: &ViewItem, folder: &mut T)
folder.new_id(node_id)) folder.new_id(node_id))
} }
ViewItemUse(ref view_paths) => { ViewItemUse(ref view_paths) => {
ViewItemUse(folder.fold_view_paths(*view_paths)) ViewItemUse(folder.fold_view_paths(view_paths.as_slice()))
} }
}; };
ViewItem { ViewItem {
@ -881,7 +883,7 @@ mod test {
// this version doesn't care about getting comments or docstrings in. // this version doesn't care about getting comments or docstrings in.
fn fake_print_crate(s: &mut pprust::State, fn fake_print_crate(s: &mut pprust::State,
krate: &ast::Crate) -> io::IoResult<()> { krate: &ast::Crate) -> io::IoResult<()> {
pprust::print_mod(s, &krate.module, krate.attrs) pprust::print_mod(s, &krate.module, krate.attrs.as_slice())
} }
// change every identifier to "zz" // change every identifier to "zz"

View file

@ -15,8 +15,9 @@
* other useful things like `push()` and `len()`. * other useful things like `push()` and `len()`.
*/ */
use std::vec;
use std::default::Default; use std::default::Default;
use std::vec;
use std::vec_ng::Vec;
#[deriving(Clone, Encodable, Decodable, Hash)] #[deriving(Clone, Encodable, Decodable, Hash)]
pub enum OptVec<T> { pub enum OptVec<T> {
@ -87,7 +88,7 @@ impl<T> OptVec<T> {
pub fn get<'a>(&'a self, i: uint) -> &'a T { pub fn get<'a>(&'a self, i: uint) -> &'a T {
match *self { match *self {
Empty => fail!("invalid index {}", i), Empty => fail!("invalid index {}", i),
Vec(ref v) => &v[i] Vec(ref v) => v.get(i)
} }
} }
@ -147,7 +148,7 @@ impl<T:Clone> OptVec<T> {
let mut v0 = vec!(t); let mut v0 = vec!(t);
match *self { match *self {
Empty => {} Empty => {}
Vec(ref v1) => { v0.push_all(*v1); } Vec(ref v1) => { v0.push_all(v1.as_slice()); }
} }
return Vec(v0); return Vec(v0);
} }

View file

@ -15,6 +15,8 @@ use parse::token;
use parse::parser::Parser; use parse::parser::Parser;
use parse::token::INTERPOLATED; use parse::token::INTERPOLATED;
use std::vec_ng::Vec;
// a parser that can parse attributes. // a parser that can parse attributes.
pub trait ParserAttr { pub trait ParserAttr {
fn parse_outer_attributes(&mut self) -> Vec<ast::Attribute> ; fn parse_outer_attributes(&mut self) -> Vec<ast::Attribute> ;

View file

@ -20,6 +20,7 @@ use parse::token;
use std::io; use std::io;
use std::str; use std::str;
use std::uint; use std::uint;
use std::vec_ng::Vec;
#[deriving(Clone, Eq)] #[deriving(Clone, Eq)]
pub enum CommentStyle { pub enum CommentStyle {
@ -58,20 +59,20 @@ pub fn strip_doc_comment_decoration(comment: &str) -> ~str {
let mut i = 0u; let mut i = 0u;
let mut j = lines.len(); let mut j = lines.len();
// first line of all-stars should be omitted // first line of all-stars should be omitted
if lines.len() > 0 && lines[0].chars().all(|c| c == '*') { if lines.len() > 0 && lines.get(0).chars().all(|c| c == '*') {
i += 1; i += 1;
} }
while i < j && lines[i].trim().is_empty() { while i < j && lines.get(i).trim().is_empty() {
i += 1; i += 1;
} }
// like the first, a last line of all stars should be omitted // like the first, a last line of all stars should be omitted
if j > i && lines[j - 1].chars().skip(1).all(|c| c == '*') { if j > i && lines.get(j - 1).chars().skip(1).all(|c| c == '*') {
j -= 1; j -= 1;
} }
while j > i && lines[j - 1].trim().is_empty() { while j > i && lines.get(j - 1).trim().is_empty() {
j -= 1; j -= 1;
} }
return lines.slice(i, j).to_owned(); return lines.slice(i, j).iter().map(|x| (*x).clone()).collect();
} }
/// remove a "[ \t]*\*" block from each line, if possible /// remove a "[ \t]*\*" block from each line, if possible

View file

@ -1005,6 +1005,7 @@ mod test {
use parse::token; use parse::token;
use parse::token::{str_to_ident}; use parse::token::{str_to_ident};
use std::io::util; use std::io::util;
use std::vec_ng::Vec;
// represents a testing reader (incl. both reader and interner) // represents a testing reader (incl. both reader and interner)
struct Env { struct Env {

View file

@ -21,6 +21,7 @@ use parse::parser::Parser;
use std::cell::RefCell; use std::cell::RefCell;
use std::io::File; use std::io::File;
use std::str; use std::str;
use std::vec_ng::Vec;
pub mod lexer; pub mod lexer;
pub mod parser; pub mod parser;
@ -288,6 +289,7 @@ mod test {
use std::io; use std::io;
use std::io::MemWriter; use std::io::MemWriter;
use std::str; use std::str;
use std::vec_ng::Vec;
use codemap::{Span, BytePos, Spanned}; use codemap::{Span, BytePos, Spanned};
use opt_vec; use opt_vec;
use ast; use ast;
@ -362,27 +364,28 @@ mod test {
// check the token-tree-ization of macros // check the token-tree-ization of macros
#[test] fn string_to_tts_macro () { #[test] fn string_to_tts_macro () {
let tts = string_to_tts(~"macro_rules! zip (($a)=>($a))"); let tts = string_to_tts(~"macro_rules! zip (($a)=>($a))");
let tts: &[ast::TokenTree] = tts; let tts: &[ast::TokenTree] = tts.as_slice();
match tts { match tts {
[ast::TTTok(_,_), [ast::TTTok(_,_),
ast::TTTok(_,token::NOT), ast::TTTok(_,token::NOT),
ast::TTTok(_,_), ast::TTTok(_,_),
ast::TTDelim(delim_elts)] => { ast::TTDelim(delim_elts)] => {
let delim_elts: &[ast::TokenTree] = *delim_elts; let delim_elts: &[ast::TokenTree] = delim_elts.as_slice();
match delim_elts { match delim_elts {
[ast::TTTok(_,token::LPAREN), [ast::TTTok(_,token::LPAREN),
ast::TTDelim(first_set), ast::TTDelim(first_set),
ast::TTTok(_,token::FAT_ARROW), ast::TTTok(_,token::FAT_ARROW),
ast::TTDelim(second_set), ast::TTDelim(second_set),
ast::TTTok(_,token::RPAREN)] => { ast::TTTok(_,token::RPAREN)] => {
let first_set: &[ast::TokenTree] = *first_set; let first_set: &[ast::TokenTree] =
first_set.as_slice();
match first_set { match first_set {
[ast::TTTok(_,token::LPAREN), [ast::TTTok(_,token::LPAREN),
ast::TTTok(_,token::DOLLAR), ast::TTTok(_,token::DOLLAR),
ast::TTTok(_,_), ast::TTTok(_,_),
ast::TTTok(_,token::RPAREN)] => { ast::TTTok(_,token::RPAREN)] => {
let second_set: &[ast::TokenTree] = let second_set: &[ast::TokenTree] =
*second_set; second_set.as_slice();
match second_set { match second_set {
[ast::TTTok(_,token::LPAREN), [ast::TTTok(_,token::LPAREN),
ast::TTTok(_,token::DOLLAR), ast::TTTok(_,token::DOLLAR),

View file

@ -82,7 +82,8 @@ use std::cell::Cell;
use collections::HashSet; use collections::HashSet;
use std::kinds::marker; use std::kinds::marker;
use std::mem::replace; use std::mem::replace;
use std::vec; use std::vec_ng::Vec;
use std::vec_ng;
#[allow(non_camel_case_types)] #[allow(non_camel_case_types)]
#[deriving(Eq)] #[deriving(Eq)]
@ -270,7 +271,7 @@ fn maybe_append(lhs: Vec<Attribute> , rhs: Option<Vec<Attribute> >)
-> Vec<Attribute> { -> Vec<Attribute> {
match rhs { match rhs {
None => lhs, None => lhs,
Some(ref attrs) => vec_ng::append(lhs, (*attrs)) Some(ref attrs) => vec_ng::append(lhs, attrs.as_slice())
} }
} }
@ -406,8 +407,11 @@ impl Parser {
} else if inedible.contains(&self.token) { } else if inedible.contains(&self.token) {
// leave it in the input // leave it in the input
} else { } else {
let expected = vec_ng::append(edible.to_owned(), inedible); let expected = vec_ng::append(edible.iter()
let expect = tokens_to_str(expected); .map(|x| (*x).clone())
.collect(),
inedible);
let expect = tokens_to_str(expected.as_slice());
let actual = self.this_token_to_str(); let actual = self.this_token_to_str();
self.fatal( self.fatal(
if expected.len() != 1 { if expected.len() != 1 {
@ -445,8 +449,12 @@ impl Parser {
match e.node { match e.node {
ExprPath(..) => { ExprPath(..) => {
// might be unit-struct construction; check for recoverableinput error. // might be unit-struct construction; check for recoverableinput error.
let expected = vec_ng::append(edible.to_owned(), inedible); let expected = vec_ng::append(edible.iter()
self.check_for_erroneous_unit_struct_expecting(expected); .map(|x| (*x).clone())
.collect(),
inedible);
self.check_for_erroneous_unit_struct_expecting(
expected.as_slice());
} }
_ => {} _ => {}
} }
@ -464,8 +472,12 @@ impl Parser {
debug!("commit_stmt {:?}", s); debug!("commit_stmt {:?}", s);
let _s = s; // unused, but future checks might want to inspect `s`. let _s = s; // unused, but future checks might want to inspect `s`.
if self.last_token.as_ref().map_or(false, |t| is_ident_or_path(*t)) { if self.last_token.as_ref().map_or(false, |t| is_ident_or_path(*t)) {
let expected = vec_ng::append(edible.to_owned(), inedible); let expected = vec_ng::append(edible.iter()
self.check_for_erroneous_unit_struct_expecting(expected); .map(|x| (*x).clone())
.collect(),
inedible.as_slice());
self.check_for_erroneous_unit_struct_expecting(
expected.as_slice());
} }
self.expect_one_of(edible, inedible) self.expect_one_of(edible, inedible)
} }
@ -1082,7 +1094,7 @@ impl Parser {
debug!("parse_trait_methods(): parsing provided method"); debug!("parse_trait_methods(): parsing provided method");
let (inner_attrs, body) = let (inner_attrs, body) =
p.parse_inner_attrs_and_block(); p.parse_inner_attrs_and_block();
let attrs = vec_ng::append(attrs, inner_attrs); let attrs = vec_ng::append(attrs, inner_attrs.as_slice());
Provided(@ast::Method { Provided(@ast::Method {
ident: ident, ident: ident,
attrs: attrs, attrs: attrs,
@ -1189,7 +1201,7 @@ impl Parser {
if ts.len() == 1 && !one_tuple { if ts.len() == 1 && !one_tuple {
self.expect(&token::RPAREN); self.expect(&token::RPAREN);
return ts[0] return *ts.get(0)
} }
let t = TyTup(ts); let t = TyTup(ts);
@ -1769,7 +1781,7 @@ impl Parser {
self.commit_expr_expecting(*es.last().unwrap(), token::RPAREN); self.commit_expr_expecting(*es.last().unwrap(), token::RPAREN);
return if es.len() == 1 && !trailing_comma { return if es.len() == 1 && !trailing_comma {
self.mk_expr(lo, hi, ExprParen(es[0])) self.mk_expr(lo, hi, ExprParen(*es.get(0)))
} }
else { else {
self.mk_expr(lo, hi, ExprTup(es)) self.mk_expr(lo, hi, ExprTup(es))
@ -1859,7 +1871,9 @@ impl Parser {
seq_sep_trailing_allowed(token::COMMA), seq_sep_trailing_allowed(token::COMMA),
|p| p.parse_expr() |p| p.parse_expr()
); );
ex = ExprVec(vec!(first_expr) + remaining_exprs, mutbl); let mut exprs = vec!(first_expr);
exprs.push_all_move(remaining_exprs);
ex = ExprVec(exprs, mutbl);
} else { } else {
// Vector with one element. // Vector with one element.
self.expect(&token::RBRACKET); self.expect(&token::RBRACKET);
@ -3327,7 +3341,7 @@ impl Parser {
while self.token != token::RBRACE { while self.token != token::RBRACE {
// parsing items even when they're not allowed lets us give // parsing items even when they're not allowed lets us give
// better error messages and recover more gracefully. // better error messages and recover more gracefully.
attributes_box.push_all(self.parse_outer_attributes()); attributes_box.push_all(self.parse_outer_attributes().as_slice());
match self.token { match self.token {
token::SEMI => { token::SEMI => {
if !attributes_box.is_empty() { if !attributes_box.is_empty() {
@ -3850,7 +3864,7 @@ impl Parser {
let (inner_attrs, body) = self.parse_inner_attrs_and_block(); let (inner_attrs, body) = self.parse_inner_attrs_and_block();
let hi = body.span.hi; let hi = body.span.hi;
let attrs = vec_ng::append(attrs, inner_attrs); let attrs = vec_ng::append(attrs, inner_attrs.as_slice());
@ast::Method { @ast::Method {
ident: ident, ident: ident,
attrs: attrs, attrs: attrs,
@ -4082,7 +4096,8 @@ impl Parser {
while self.token != term { while self.token != term {
let mut attrs = self.parse_outer_attributes(); let mut attrs = self.parse_outer_attributes();
if first { if first {
attrs = attrs_remaining + attrs; attrs = vec_ng::append(attrs_remaining.clone(),
attrs.as_slice());
first = false; first = false;
} }
debug!("parse_mod_items: parse_item_or_view_item(attrs={:?})", debug!("parse_mod_items: parse_item_or_view_item(attrs={:?})",
@ -4164,7 +4179,7 @@ impl Parser {
-> (ast::Item_, Vec<ast::Attribute> ) { -> (ast::Item_, Vec<ast::Attribute> ) {
let mut prefix = Path::new(self.sess.cm.span_to_filename(self.span)); let mut prefix = Path::new(self.sess.cm.span_to_filename(self.span));
prefix.pop(); prefix.pop();
let mod_path = Path::new(".").join_many(self.mod_path_stack); let mod_path = Path::new(".").join_many(self.mod_path_stack.as_slice());
let dir_path = prefix.join(&mod_path); let dir_path = prefix.join(&mod_path);
let file_path = match ::attr::first_attr_value_str_by_name( let file_path = match ::attr::first_attr_value_str_by_name(
outer_attrs, "path") { outer_attrs, "path") {
@ -4194,7 +4209,7 @@ impl Parser {
}; };
self.eval_src_mod_from_path(file_path, self.eval_src_mod_from_path(file_path,
outer_attrs.to_owned(), outer_attrs.iter().map(|x| *x).collect(),
id_sp) id_sp)
} }
@ -4231,7 +4246,7 @@ impl Parser {
&path, &path,
id_sp); id_sp);
let (inner, next) = p0.parse_inner_attrs_and_next(); let (inner, next) = p0.parse_inner_attrs_and_next();
let mod_attrs = vec_ng::append(outer_attrs, inner); let mod_attrs = vec_ng::append(outer_attrs, inner.as_slice());
let first_item_outer_attrs = next; let first_item_outer_attrs = next;
let m0 = p0.parse_mod_items(token::EOF, first_item_outer_attrs); let m0 = p0.parse_mod_items(token::EOF, first_item_outer_attrs);
{ {
@ -4556,7 +4571,7 @@ impl Parser {
match self.token { match self.token {
INTERPOLATED(token::NtItem(item)) => { INTERPOLATED(token::NtItem(item)) => {
self.bump(); self.bump();
let new_attrs = vec_ng::append(attrs, item.attrs); let new_attrs = vec_ng::append(attrs, item.attrs.as_slice());
return IoviItem(@Item { return IoviItem(@Item {
attrs: new_attrs, attrs: new_attrs,
..(*item).clone() ..(*item).clone()
@ -4662,7 +4677,8 @@ impl Parser {
} }
if self.eat_keyword(keywords::Mod) { if self.eat_keyword(keywords::Mod) {
// MODULE ITEM // MODULE ITEM
let (ident, item_, extra_attrs) = self.parse_item_mod(attrs); let (ident, item_, extra_attrs) =
self.parse_item_mod(attrs.as_slice());
let item = self.mk_item(lo, let item = self.mk_item(lo,
self.last_span.hi, self.last_span.hi,
ident, ident,
@ -4946,7 +4962,7 @@ impl Parser {
} }
_ => () _ => ()
} }
let last = path[path.len() - 1u]; let last = *path.get(path.len() - 1u);
let path = ast::Path { let path = ast::Path {
span: mk_sp(lo, self.span.hi), span: mk_sp(lo, self.span.hi),
global: false, global: false,
@ -4984,7 +5000,8 @@ impl Parser {
macros_allowed: bool) macros_allowed: bool)
-> ParsedItemsAndViewItems { -> ParsedItemsAndViewItems {
let mut attrs = vec_ng::append(first_item_attrs, let mut attrs = vec_ng::append(first_item_attrs,
self.parse_outer_attributes()); self.parse_outer_attributes()
.as_slice());
// First, parse view items. // First, parse view items.
let mut view_items : Vec<ast::ViewItem> = Vec::new(); let mut view_items : Vec<ast::ViewItem> = Vec::new();
let mut items = Vec::new(); let mut items = Vec::new();
@ -5065,7 +5082,8 @@ impl Parser {
macros_allowed: bool) macros_allowed: bool)
-> ParsedItemsAndViewItems { -> ParsedItemsAndViewItems {
let mut attrs = vec_ng::append(first_item_attrs, let mut attrs = vec_ng::append(first_item_attrs,
self.parse_outer_attributes()); self.parse_outer_attributes()
.as_slice());
let mut foreign_items = Vec::new(); let mut foreign_items = Vec::new();
loop { loop {
match self.parse_foreign_item(attrs, macros_allowed) { match self.parse_foreign_item(attrs, macros_allowed) {

View file

@ -21,6 +21,7 @@ use std::char;
use std::fmt; use std::fmt;
use std::local_data; use std::local_data;
use std::path::BytesContainer; use std::path::BytesContainer;
use std::vec_ng::Vec;
#[allow(non_camel_case_types)] #[allow(non_camel_case_types)]
#[deriving(Clone, Encodable, Decodable, Eq, Hash, Show)] #[deriving(Clone, Encodable, Decodable, Eq, Hash, Show)]
@ -412,13 +413,11 @@ macro_rules! declare_special_idents_and_keywords {(
// The indices here must correspond to the numbers in // The indices here must correspond to the numbers in
// special_idents, in Keyword to_ident(), and in static // special_idents, in Keyword to_ident(), and in static
// constants below. // constants below.
let init_vec = vec!( let mut init_vec = Vec::new();
$( $si_str, )* $(init_vec.push($si_str);)*
$( $sk_str, )* $(init_vec.push($sk_str);)*
$( $rk_str, )* $(init_vec.push($rk_str);)*
); interner::StrInterner::prefill(init_vec.as_slice())
interner::StrInterner::prefill(init_vec)
} }
}} }}

View file

@ -62,7 +62,7 @@
*/ */
use std::io; use std::io;
use std::vec; use std::vec_ng::Vec;
#[deriving(Clone, Eq)] #[deriving(Clone, Eq)]
pub enum Breaks { pub enum Breaks {
@ -131,7 +131,7 @@ pub fn buf_str(toks: Vec<Token> , szs: Vec<int> , left: uint, right: uint,
if i != left { if i != left {
s.push_str(", "); s.push_str(", ");
} }
s.push_str(format!("{}={}", szs[i], tok_str(toks[i].clone()))); s.push_str(format!("{}={}", szs.get(i), tok_str(toks.get(i).clone())));
i += 1u; i += 1u;
i %= n; i %= n;
} }
@ -156,9 +156,9 @@ pub fn mk_printer(out: ~io::Writer, linewidth: uint) -> Printer {
// fall behind. // fall behind.
let n: uint = 3 * linewidth; let n: uint = 3 * linewidth;
debug!("mk_printer {}", linewidth); debug!("mk_printer {}", linewidth);
let token: Vec<Token> = vec::from_elem(n, Eof); let token: Vec<Token> = Vec::from_elem(n, Eof);
let size: Vec<int> = vec::from_elem(n, 0); let size: Vec<int> = Vec::from_elem(n, 0);
let scan_stack: Vec<uint> = vec::from_elem(n, 0u); let scan_stack: Vec<uint> = Vec::from_elem(n, 0u);
Printer { Printer {
out: out, out: out,
buf_len: n, buf_len: n,
@ -286,11 +286,11 @@ pub struct Printer {
impl Printer { impl Printer {
pub fn last_token(&mut self) -> Token { pub fn last_token(&mut self) -> Token {
self.token[self.right].clone() (*self.token.get(self.right)).clone()
} }
// be very careful with this! // be very careful with this!
pub fn replace_last_token(&mut self, t: Token) { pub fn replace_last_token(&mut self, t: Token) {
self.token[self.right] = t; *self.token.get_mut(self.right) = t;
} }
pub fn pretty_print(&mut self, t: Token) -> io::IoResult<()> { pub fn pretty_print(&mut self, t: Token) -> io::IoResult<()> {
debug!("pp ~[{},{}]", self.left, self.right); debug!("pp ~[{},{}]", self.left, self.right);
@ -298,8 +298,9 @@ impl Printer {
Eof => { Eof => {
if !self.scan_stack_empty { if !self.scan_stack_empty {
self.check_stack(0); self.check_stack(0);
let left = self.token[self.left].clone(); let left = (*self.token.get(self.left)).clone();
try!(self.advance_left(left, self.size[self.left])); let left_size = *self.size.get(self.left);
try!(self.advance_left(left, left_size));
} }
self.indent(0); self.indent(0);
Ok(()) Ok(())
@ -313,8 +314,8 @@ impl Printer {
} else { self.advance_right(); } } else { self.advance_right(); }
debug!("pp Begin({})/buffer ~[{},{}]", debug!("pp Begin({})/buffer ~[{},{}]",
b.offset, self.left, self.right); b.offset, self.left, self.right);
self.token[self.right] = t; *self.token.get_mut(self.right) = t;
self.size[self.right] = -self.right_total; *self.size.get_mut(self.right) = -self.right_total;
self.scan_push(self.right); self.scan_push(self.right);
Ok(()) Ok(())
} }
@ -325,8 +326,8 @@ impl Printer {
} else { } else {
debug!("pp End/buffer ~[{},{}]", self.left, self.right); debug!("pp End/buffer ~[{},{}]", self.left, self.right);
self.advance_right(); self.advance_right();
self.token[self.right] = t; *self.token.get_mut(self.right) = t;
self.size[self.right] = -1; *self.size.get_mut(self.right) = -1;
self.scan_push(self.right); self.scan_push(self.right);
Ok(()) Ok(())
} }
@ -342,8 +343,8 @@ impl Printer {
b.offset, self.left, self.right); b.offset, self.left, self.right);
self.check_stack(0); self.check_stack(0);
self.scan_push(self.right); self.scan_push(self.right);
self.token[self.right] = t; *self.token.get_mut(self.right) = t;
self.size[self.right] = -self.right_total; *self.size.get_mut(self.right) = -self.right_total;
self.right_total += b.blank_space; self.right_total += b.blank_space;
Ok(()) Ok(())
} }
@ -356,8 +357,8 @@ impl Printer {
debug!("pp String('{}')/buffer ~[{},{}]", debug!("pp String('{}')/buffer ~[{},{}]",
*s, self.left, self.right); *s, self.left, self.right);
self.advance_right(); self.advance_right();
self.token[self.right] = t.clone(); *self.token.get_mut(self.right) = t.clone();
self.size[self.right] = len; *self.size.get_mut(self.right) = len;
self.right_total += len; self.right_total += len;
self.check_stream() self.check_stream()
} }
@ -371,13 +372,15 @@ impl Printer {
debug!("scan window is {}, longer than space on line ({})", debug!("scan window is {}, longer than space on line ({})",
self.right_total - self.left_total, self.space); self.right_total - self.left_total, self.space);
if !self.scan_stack_empty { if !self.scan_stack_empty {
if self.left == self.scan_stack[self.bottom] { if self.left == *self.scan_stack.get(self.bottom) {
debug!("setting {} to infinity and popping", self.left); debug!("setting {} to infinity and popping", self.left);
self.size[self.scan_pop_bottom()] = SIZE_INFINITY; let scanned = self.scan_pop_bottom();
*self.size.get_mut(scanned) = SIZE_INFINITY;
} }
} }
let left = self.token[self.left].clone(); let left = (*self.token.get(self.left)).clone();
try!(self.advance_left(left, self.size[self.left])); let left_size = *self.size.get(self.left);
try!(self.advance_left(left, left_size));
if self.left != self.right { if self.left != self.right {
try!(self.check_stream()); try!(self.check_stream());
} }
@ -393,26 +396,30 @@ impl Printer {
self.top %= self.buf_len; self.top %= self.buf_len;
assert!((self.top != self.bottom)); assert!((self.top != self.bottom));
} }
self.scan_stack[self.top] = x; *self.scan_stack.get_mut(self.top) = x;
} }
pub fn scan_pop(&mut self) -> uint { pub fn scan_pop(&mut self) -> uint {
assert!((!self.scan_stack_empty)); assert!((!self.scan_stack_empty));
let x = self.scan_stack[self.top]; let x = *self.scan_stack.get(self.top);
if self.top == self.bottom { if self.top == self.bottom {
self.scan_stack_empty = true; self.scan_stack_empty = true;
} else { self.top += self.buf_len - 1u; self.top %= self.buf_len; } } else {
self.top += self.buf_len - 1u; self.top %= self.buf_len;
}
return x; return x;
} }
pub fn scan_top(&mut self) -> uint { pub fn scan_top(&mut self) -> uint {
assert!((!self.scan_stack_empty)); assert!((!self.scan_stack_empty));
return self.scan_stack[self.top]; return *self.scan_stack.get(self.top);
} }
pub fn scan_pop_bottom(&mut self) -> uint { pub fn scan_pop_bottom(&mut self) -> uint {
assert!((!self.scan_stack_empty)); assert!((!self.scan_stack_empty));
let x = self.scan_stack[self.bottom]; let x = *self.scan_stack.get(self.bottom);
if self.top == self.bottom { if self.top == self.bottom {
self.scan_stack_empty = true; self.scan_stack_empty = true;
} else { self.bottom += 1u; self.bottom %= self.buf_len; } } else {
self.bottom += 1u; self.bottom %= self.buf_len;
}
return x; return x;
} }
pub fn advance_right(&mut self) { pub fn advance_right(&mut self) {
@ -435,8 +442,9 @@ impl Printer {
if self.left != self.right { if self.left != self.right {
self.left += 1u; self.left += 1u;
self.left %= self.buf_len; self.left %= self.buf_len;
let left = self.token[self.left].clone(); let left = (*self.token.get(self.left)).clone();
try!(self.advance_left(left, self.size[self.left])); let left_size = *self.size.get(self.left);
try!(self.advance_left(left, left_size));
} }
ret ret
} else { } else {
@ -446,22 +454,28 @@ impl Printer {
pub fn check_stack(&mut self, k: int) { pub fn check_stack(&mut self, k: int) {
if !self.scan_stack_empty { if !self.scan_stack_empty {
let x = self.scan_top(); let x = self.scan_top();
match self.token[x] { match self.token.get(x) {
Begin(_) => { &Begin(_) => {
if k > 0 { if k > 0 {
self.size[self.scan_pop()] = self.size[x] + let popped = self.scan_pop();
*self.size.get_mut(popped) = *self.size.get(x) +
self.right_total; self.right_total;
self.check_stack(k - 1); self.check_stack(k - 1);
} }
} }
End => { &End => {
// paper says + not =, but that makes no sense. // paper says + not =, but that makes no sense.
self.size[self.scan_pop()] = 1; let popped = self.scan_pop();
*self.size.get_mut(popped) = 1;
self.check_stack(k + 1); self.check_stack(k + 1);
} }
_ => { _ => {
self.size[self.scan_pop()] = self.size[x] + self.right_total; let popped = self.scan_pop();
if k > 0 { self.check_stack(k); } *self.size.get_mut(popped) = *self.size.get(x) +
self.right_total;
if k > 0 {
self.check_stack(k);
}
} }
} }
} }
@ -481,7 +495,7 @@ impl Printer {
let print_stack = &mut self.print_stack; let print_stack = &mut self.print_stack;
let n = print_stack.len(); let n = print_stack.len();
if n != 0u { if n != 0u {
print_stack[n - 1u] *print_stack.get(n - 1u)
} else { } else {
PrintStackElem { PrintStackElem {
offset: 0, offset: 0,

View file

@ -33,6 +33,7 @@ use std::char;
use std::str; use std::str;
use std::io; use std::io;
use std::io::MemWriter; use std::io::MemWriter;
use std::vec_ng::Vec;
// The &mut State is stored here to prevent recursive type. // The &mut State is stored here to prevent recursive type.
pub enum AnnNode<'a, 'b> { pub enum AnnNode<'a, 'b> {
@ -147,7 +148,7 @@ pub fn print_crate(cm: @CodeMap,
} }
pub fn print_crate_(s: &mut State, krate: &ast::Crate) -> io::IoResult<()> { pub fn print_crate_(s: &mut State, krate: &ast::Crate) -> io::IoResult<()> {
try!(print_mod(s, &krate.module, krate.attrs)); try!(print_mod(s, &krate.module, krate.attrs.as_slice()));
try!(print_remaining_comments(s)); try!(print_remaining_comments(s));
try!(eof(&mut s.s)); try!(eof(&mut s.s));
Ok(()) Ok(())
@ -319,7 +320,7 @@ pub fn in_cbox(s: &mut State) -> bool {
let boxes = s.boxes.borrow(); let boxes = s.boxes.borrow();
let len = boxes.get().len(); let len = boxes.get().len();
if len == 0u { return false; } if len == 0u { return false; }
return boxes.get()[len - 1u] == pp::Consistent; return *boxes.get().get(len - 1u) == pp::Consistent;
} }
pub fn hardbreak_if_not_bol(s: &mut State) -> io::IoResult<()> { pub fn hardbreak_if_not_bol(s: &mut State) -> io::IoResult<()> {
@ -463,7 +464,7 @@ pub fn print_type(s: &mut State, ty: &ast::Ty) -> io::IoResult<()> {
} }
ast::TyTup(ref elts) => { ast::TyTup(ref elts) => {
try!(popen(s)); try!(popen(s));
try!(commasep(s, Inconsistent, *elts, print_type_ref)); try!(commasep(s, Inconsistent, elts.as_slice(), print_type_ref));
if elts.len() == 1 { if elts.len() == 1 {
try!(word(&mut s.s, ",")); try!(word(&mut s.s, ","));
} }
@ -517,7 +518,7 @@ pub fn print_foreign_item(s: &mut State,
item: &ast::ForeignItem) -> io::IoResult<()> { item: &ast::ForeignItem) -> io::IoResult<()> {
try!(hardbreak_if_not_bol(s)); try!(hardbreak_if_not_bol(s));
try!(maybe_print_comment(s, item.span.lo)); try!(maybe_print_comment(s, item.span.lo));
try!(print_outer_attributes(s, item.attrs)); try!(print_outer_attributes(s, item.attrs.as_slice()));
match item.node { match item.node {
ast::ForeignItemFn(decl, ref generics) => { ast::ForeignItemFn(decl, ref generics) => {
try!(print_fn(s, decl, None, AbiSet::Rust(), item.ident, generics, try!(print_fn(s, decl, None, AbiSet::Rust(), item.ident, generics,
@ -545,7 +546,7 @@ pub fn print_foreign_item(s: &mut State,
pub fn print_item(s: &mut State, item: &ast::Item) -> io::IoResult<()> { pub fn print_item(s: &mut State, item: &ast::Item) -> io::IoResult<()> {
try!(hardbreak_if_not_bol(s)); try!(hardbreak_if_not_bol(s));
try!(maybe_print_comment(s, item.span.lo)); try!(maybe_print_comment(s, item.span.lo));
try!(print_outer_attributes(s, item.attrs)); try!(print_outer_attributes(s, item.attrs.as_slice()));
{ {
let ann_node = NodeItem(s, item); let ann_node = NodeItem(s, item);
try!(s.ann.pre(ann_node)); try!(s.ann.pre(ann_node));
@ -580,21 +581,21 @@ pub fn print_item(s: &mut State, item: &ast::Item) -> io::IoResult<()> {
item.vis item.vis
)); ));
try!(word(&mut s.s, " ")); try!(word(&mut s.s, " "));
try!(print_block_with_attrs(s, body, item.attrs)); try!(print_block_with_attrs(s, body, item.attrs.as_slice()));
} }
ast::ItemMod(ref _mod) => { ast::ItemMod(ref _mod) => {
try!(head(s, visibility_qualified(item.vis, "mod"))); try!(head(s, visibility_qualified(item.vis, "mod")));
try!(print_ident(s, item.ident)); try!(print_ident(s, item.ident));
try!(nbsp(s)); try!(nbsp(s));
try!(bopen(s)); try!(bopen(s));
try!(print_mod(s, _mod, item.attrs)); try!(print_mod(s, _mod, item.attrs.as_slice()));
try!(bclose(s, item.span)); try!(bclose(s, item.span));
} }
ast::ItemForeignMod(ref nmod) => { ast::ItemForeignMod(ref nmod) => {
try!(head(s, "extern")); try!(head(s, "extern"));
try!(word_nbsp(s, nmod.abis.to_str())); try!(word_nbsp(s, nmod.abis.to_str()));
try!(bopen(s)); try!(bopen(s));
try!(print_foreign_mod(s, nmod, item.attrs)); try!(print_foreign_mod(s, nmod, item.attrs.as_slice()));
try!(bclose(s, item.span)); try!(bclose(s, item.span));
} }
ast::ItemTy(ty, ref params) => { ast::ItemTy(ty, ref params) => {
@ -646,7 +647,7 @@ pub fn print_item(s: &mut State, item: &ast::Item) -> io::IoResult<()> {
try!(space(&mut s.s)); try!(space(&mut s.s));
try!(bopen(s)); try!(bopen(s));
try!(print_inner_attributes(s, item.attrs)); try!(print_inner_attributes(s, item.attrs.as_slice()));
for meth in methods.iter() { for meth in methods.iter() {
try!(print_method(s, *meth)); try!(print_method(s, *meth));
} }
@ -706,7 +707,7 @@ pub fn print_enum_def(s: &mut State, enum_definition: &ast::EnumDef,
try!(print_ident(s, ident)); try!(print_ident(s, ident));
try!(print_generics(s, generics)); try!(print_generics(s, generics));
try!(space(&mut s.s)); try!(space(&mut s.s));
try!(print_variants(s, enum_definition.variants, span)); try!(print_variants(s, enum_definition.variants.as_slice(), span));
Ok(()) Ok(())
} }
@ -717,7 +718,7 @@ pub fn print_variants(s: &mut State,
for &v in variants.iter() { for &v in variants.iter() {
try!(space_if_not_bol(s)); try!(space_if_not_bol(s));
try!(maybe_print_comment(s, v.span.lo)); try!(maybe_print_comment(s, v.span.lo));
try!(print_outer_attributes(s, v.node.attrs)); try!(print_outer_attributes(s, v.node.attrs.as_slice()));
try!(ibox(s, indent_unit)); try!(ibox(s, indent_unit));
try!(print_variant(s, v)); try!(print_variant(s, v));
try!(word(&mut s.s, ",")); try!(word(&mut s.s, ","));
@ -761,7 +762,10 @@ pub fn print_struct(s: &mut State,
if ast_util::struct_def_is_tuple_like(struct_def) { if ast_util::struct_def_is_tuple_like(struct_def) {
if !struct_def.fields.is_empty() { if !struct_def.fields.is_empty() {
try!(popen(s)); try!(popen(s));
try!(commasep(s, Inconsistent, struct_def.fields, |s, field| { try!(commasep(s,
Inconsistent,
struct_def.fields.as_slice(),
|s, field| {
match field.node.kind { match field.node.kind {
ast::NamedField(..) => fail!("unexpected named field"), ast::NamedField(..) => fail!("unexpected named field"),
ast::UnnamedField => { ast::UnnamedField => {
@ -787,7 +791,8 @@ pub fn print_struct(s: &mut State,
ast::NamedField(ident, visibility) => { ast::NamedField(ident, visibility) => {
try!(hardbreak_if_not_bol(s)); try!(hardbreak_if_not_bol(s));
try!(maybe_print_comment(s, field.span.lo)); try!(maybe_print_comment(s, field.span.lo));
try!(print_outer_attributes(s, field.node.attrs)); try!(print_outer_attributes(s,
field.node.attrs.as_slice()));
try!(print_visibility(s, visibility)); try!(print_visibility(s, visibility));
try!(print_ident(s, ident)); try!(print_ident(s, ident));
try!(word_nbsp(s, ":")); try!(word_nbsp(s, ":"));
@ -857,7 +862,10 @@ pub fn print_variant(s: &mut State, v: &ast::Variant) -> io::IoResult<()> {
arg: &ast::VariantArg) -> io::IoResult<()> { arg: &ast::VariantArg) -> io::IoResult<()> {
print_type(s, arg.ty) print_type(s, arg.ty)
} }
try!(commasep(s, Consistent, *args, print_variant_arg)); try!(commasep(s,
Consistent,
args.as_slice(),
print_variant_arg));
try!(pclose(s)); try!(pclose(s));
} }
} }
@ -881,7 +889,7 @@ pub fn print_variant(s: &mut State, v: &ast::Variant) -> io::IoResult<()> {
pub fn print_ty_method(s: &mut State, m: &ast::TypeMethod) -> io::IoResult<()> { pub fn print_ty_method(s: &mut State, m: &ast::TypeMethod) -> io::IoResult<()> {
try!(hardbreak_if_not_bol(s)); try!(hardbreak_if_not_bol(s));
try!(maybe_print_comment(s, m.span.lo)); try!(maybe_print_comment(s, m.span.lo));
try!(print_outer_attributes(s, m.attrs)); try!(print_outer_attributes(s, m.attrs.as_slice()));
try!(print_ty_fn(s, try!(print_ty_fn(s,
None, None,
None, None,
@ -907,12 +915,12 @@ pub fn print_trait_method(s: &mut State,
pub fn print_method(s: &mut State, meth: &ast::Method) -> io::IoResult<()> { pub fn print_method(s: &mut State, meth: &ast::Method) -> io::IoResult<()> {
try!(hardbreak_if_not_bol(s)); try!(hardbreak_if_not_bol(s));
try!(maybe_print_comment(s, meth.span.lo)); try!(maybe_print_comment(s, meth.span.lo));
try!(print_outer_attributes(s, meth.attrs)); try!(print_outer_attributes(s, meth.attrs.as_slice()));
try!(print_fn(s, meth.decl, Some(meth.purity), AbiSet::Rust(), try!(print_fn(s, meth.decl, Some(meth.purity), AbiSet::Rust(),
meth.ident, &meth.generics, Some(meth.explicit_self.node), meth.ident, &meth.generics, Some(meth.explicit_self.node),
meth.vis)); meth.vis));
try!(word(&mut s.s, " ")); try!(word(&mut s.s, " "));
print_block_with_attrs(s, meth.body, meth.attrs) print_block_with_attrs(s, meth.body, meth.attrs.as_slice())
} }
pub fn print_outer_attributes(s: &mut State, pub fn print_outer_attributes(s: &mut State,
@ -1184,7 +1192,7 @@ pub fn print_expr(s: &mut State, expr: &ast::Expr) -> io::IoResult<()> {
try!(word(&mut s.s, "mut")); try!(word(&mut s.s, "mut"));
if exprs.len() > 0u { try!(nbsp(s)); } if exprs.len() > 0u { try!(nbsp(s)); }
} }
try!(commasep_exprs(s, Inconsistent, *exprs)); try!(commasep_exprs(s, Inconsistent, exprs.as_slice()));
try!(word(&mut s.s, "]")); try!(word(&mut s.s, "]"));
try!(end(s)); try!(end(s));
} }
@ -1207,7 +1215,11 @@ pub fn print_expr(s: &mut State, expr: &ast::Expr) -> io::IoResult<()> {
ast::ExprStruct(ref path, ref fields, wth) => { ast::ExprStruct(ref path, ref fields, wth) => {
try!(print_path(s, path, true)); try!(print_path(s, path, true));
try!(word(&mut s.s, "{")); try!(word(&mut s.s, "{"));
try!(commasep_cmnt(s, Consistent, (*fields), print_field, get_span)); try!(commasep_cmnt(s,
Consistent,
fields.as_slice(),
print_field,
get_span));
match wth { match wth {
Some(expr) => { Some(expr) => {
try!(ibox(s, indent_unit)); try!(ibox(s, indent_unit));
@ -1225,7 +1237,7 @@ pub fn print_expr(s: &mut State, expr: &ast::Expr) -> io::IoResult<()> {
} }
ast::ExprTup(ref exprs) => { ast::ExprTup(ref exprs) => {
try!(popen(s)); try!(popen(s));
try!(commasep_exprs(s, Inconsistent, *exprs)); try!(commasep_exprs(s, Inconsistent, exprs.as_slice()));
if exprs.len() == 1 { if exprs.len() == 1 {
try!(word(&mut s.s, ",")); try!(word(&mut s.s, ","));
} }
@ -1233,16 +1245,16 @@ pub fn print_expr(s: &mut State, expr: &ast::Expr) -> io::IoResult<()> {
} }
ast::ExprCall(func, ref args) => { ast::ExprCall(func, ref args) => {
try!(print_expr(s, func)); try!(print_expr(s, func));
try!(print_call_post(s, *args)); try!(print_call_post(s, args.as_slice()));
} }
ast::ExprMethodCall(ident, ref tys, ref args) => { ast::ExprMethodCall(ident, ref tys, ref args) => {
let base_args = args.slice_from(1); let base_args = args.slice_from(1);
try!(print_expr(s, args[0])); try!(print_expr(s, *args.get(0)));
try!(word(&mut s.s, ".")); try!(word(&mut s.s, "."));
try!(print_ident(s, ident)); try!(print_ident(s, ident));
if tys.len() > 0u { if tys.len() > 0u {
try!(word(&mut s.s, "::<")); try!(word(&mut s.s, "::<"));
try!(commasep(s, Inconsistent, *tys, print_type_ref)); try!(commasep(s, Inconsistent, tys.as_slice(), print_type_ref));
try!(word(&mut s.s, ">")); try!(word(&mut s.s, ">"));
} }
try!(print_call_post(s, base_args)); try!(print_call_post(s, base_args));
@ -1455,7 +1467,7 @@ pub fn print_expr(s: &mut State, expr: &ast::Expr) -> io::IoResult<()> {
try!(print_ident(s, id)); try!(print_ident(s, id));
if tys.len() > 0u { if tys.len() > 0u {
try!(word(&mut s.s, "::<")); try!(word(&mut s.s, "::<"));
try!(commasep(s, Inconsistent, *tys, print_type_ref)); try!(commasep(s, Inconsistent, tys.as_slice(), print_type_ref));
try!(word(&mut s.s, ">")); try!(word(&mut s.s, ">"));
} }
} }
@ -1649,7 +1661,7 @@ fn print_path_(s: &mut State,
} }
try!(commasep(s, try!(commasep(s,
Inconsistent, Inconsistent,
segment.types.map_to_vec(|&t| t), segment.types.map_to_vec(|&t| t).as_slice(),
print_type_ref)); print_type_ref));
} }
@ -1708,7 +1720,7 @@ pub fn print_pat(s: &mut State, pat: &ast::Pat) -> io::IoResult<()> {
Some(ref args) => { Some(ref args) => {
if !args.is_empty() { if !args.is_empty() {
try!(popen(s)); try!(popen(s));
try!(commasep(s, Inconsistent, *args, try!(commasep(s, Inconsistent, args.as_slice(),
|s, &p| print_pat(s, p))); |s, &p| print_pat(s, p)));
try!(pclose(s)); try!(pclose(s));
} else { } } else { }
@ -1727,7 +1739,7 @@ pub fn print_pat(s: &mut State, pat: &ast::Pat) -> io::IoResult<()> {
Ok(()) Ok(())
} }
fn get_span(f: &ast::FieldPat) -> codemap::Span { return f.pat.span; } fn get_span(f: &ast::FieldPat) -> codemap::Span { return f.pat.span; }
try!(commasep_cmnt(s, Consistent, *fields, try!(commasep_cmnt(s, Consistent, fields.as_slice(),
|s, f| print_field(s,f), |s, f| print_field(s,f),
get_span)); get_span));
if etc { if etc {
@ -1738,7 +1750,10 @@ pub fn print_pat(s: &mut State, pat: &ast::Pat) -> io::IoResult<()> {
} }
ast::PatTup(ref elts) => { ast::PatTup(ref elts) => {
try!(popen(s)); try!(popen(s));
try!(commasep(s, Inconsistent, *elts, |s, &p| print_pat(s, p))); try!(commasep(s,
Inconsistent,
elts.as_slice(),
|s, &p| print_pat(s, p)));
if elts.len() == 1 { if elts.len() == 1 {
try!(word(&mut s.s, ",")); try!(word(&mut s.s, ","));
} }
@ -1761,7 +1776,10 @@ pub fn print_pat(s: &mut State, pat: &ast::Pat) -> io::IoResult<()> {
} }
ast::PatVec(ref before, slice, ref after) => { ast::PatVec(ref before, slice, ref after) => {
try!(word(&mut s.s, "[")); try!(word(&mut s.s, "["));
try!(commasep(s, Inconsistent, *before, |s, &p| print_pat(s, p))); try!(commasep(s,
Inconsistent,
before.as_slice(),
|s, &p| print_pat(s, p)));
for &p in slice.iter() { for &p in slice.iter() {
if !before.is_empty() { try!(word_space(s, ",")); } if !before.is_empty() { try!(word_space(s, ",")); }
match *p { match *p {
@ -1773,7 +1791,10 @@ pub fn print_pat(s: &mut State, pat: &ast::Pat) -> io::IoResult<()> {
try!(print_pat(s, p)); try!(print_pat(s, p));
if !after.is_empty() { try!(word_space(s, ",")); } if !after.is_empty() { try!(word_space(s, ",")); }
} }
try!(commasep(s, Inconsistent, *after, |s, &p| print_pat(s, p))); try!(commasep(s,
Inconsistent,
after.as_slice(),
|s, &p| print_pat(s, p)));
try!(word(&mut s.s, "]")); try!(word(&mut s.s, "]"));
} }
} }
@ -1842,7 +1863,7 @@ pub fn print_fn_args(s: &mut State, decl: &ast::FnDecl,
for &explicit_self in opt_explicit_self.iter() { for &explicit_self in opt_explicit_self.iter() {
let m = match explicit_self { let m = match explicit_self {
ast::SelfStatic => ast::MutImmutable, ast::SelfStatic => ast::MutImmutable,
_ => match decl.inputs[0].pat.node { _ => match decl.inputs.get(0).pat.node {
ast::PatIdent(ast::BindByValue(m), _, _) => m, ast::PatIdent(ast::BindByValue(m), _, _) => m,
_ => ast::MutImmutable _ => ast::MutImmutable
} }
@ -1986,7 +2007,7 @@ pub fn print_generics(s: &mut State,
ints.push(i); ints.push(i);
} }
try!(commasep(s, Inconsistent, ints, try!(commasep(s, Inconsistent, ints.as_slice(),
|s, &i| print_item(s, generics, i))); |s, &i| print_item(s, generics, i)));
try!(word(&mut s.s, ">")); try!(word(&mut s.s, ">"));
} }
@ -2041,7 +2062,7 @@ pub fn print_view_path(s: &mut State, vp: &ast::ViewPath) -> io::IoResult<()> {
try!(print_path(s, path, false)); try!(print_path(s, path, false));
try!(word(&mut s.s, "::{")); try!(word(&mut s.s, "::{"));
} }
try!(commasep(s, Inconsistent, (*idents), |s, w| { try!(commasep(s, Inconsistent, idents.as_slice(), |s, w| {
print_ident(s, w.node.name) print_ident(s, w.node.name)
})); }));
word(&mut s.s, "}") word(&mut s.s, "}")
@ -2057,7 +2078,7 @@ pub fn print_view_paths(s: &mut State,
pub fn print_view_item(s: &mut State, item: &ast::ViewItem) -> io::IoResult<()> { pub fn print_view_item(s: &mut State, item: &ast::ViewItem) -> io::IoResult<()> {
try!(hardbreak_if_not_bol(s)); try!(hardbreak_if_not_bol(s));
try!(maybe_print_comment(s, item.span.lo)); try!(maybe_print_comment(s, item.span.lo));
try!(print_outer_attributes(s, item.attrs)); try!(print_outer_attributes(s, item.attrs.as_slice()));
try!(print_visibility(s, item.vis)); try!(print_visibility(s, item.vis));
match item.node { match item.node {
ast::ViewItemExternMod(id, ref optional_path, _) => { ast::ViewItemExternMod(id, ref optional_path, _) => {
@ -2073,7 +2094,7 @@ pub fn print_view_item(s: &mut State, item: &ast::ViewItem) -> io::IoResult<()>
ast::ViewItemUse(ref vps) => { ast::ViewItemUse(ref vps) => {
try!(head(s, "use")); try!(head(s, "use"));
try!(print_view_paths(s, *vps)); try!(print_view_paths(s, vps.as_slice()));
} }
} }
try!(word(&mut s.s, ";")); try!(word(&mut s.s, ";"));
@ -2103,7 +2124,7 @@ pub fn print_arg(s: &mut State, input: &ast::Arg) -> io::IoResult<()> {
match input.pat.node { match input.pat.node {
ast::PatIdent(_, ref path, _) if ast::PatIdent(_, ref path, _) if
path.segments.len() == 1 && path.segments.len() == 1 &&
path.segments[0].identifier.name == path.segments.get(0).identifier.name ==
parse::token::special_idents::invalid.name => { parse::token::special_idents::invalid.name => {
// Do nothing. // Do nothing.
} }
@ -2286,7 +2307,7 @@ pub fn print_literal(s: &mut State, lit: &ast::Lit) -> io::IoResult<()> {
ast::LitBinary(ref arr) => { ast::LitBinary(ref arr) => {
try!(ibox(s, indent_unit)); try!(ibox(s, indent_unit));
try!(word(&mut s.s, "[")); try!(word(&mut s.s, "["));
try!(commasep_cmnt(s, Inconsistent, *arr.borrow(), try!(commasep_cmnt(s, Inconsistent, arr.borrow().as_slice(),
|s, u| word(&mut s.s, format!("{}", *u)), |s, u| word(&mut s.s, format!("{}", *u)),
|_| lit.span)); |_| lit.span));
try!(word(&mut s.s, "]")); try!(word(&mut s.s, "]"));
@ -2303,7 +2324,7 @@ pub fn next_lit(s: &mut State, pos: BytePos) -> Option<comments::Literal> {
match s.literals { match s.literals {
Some(ref lits) => { Some(ref lits) => {
while s.cur_cmnt_and_lit.cur_lit < lits.len() { while s.cur_cmnt_and_lit.cur_lit < lits.len() {
let ltrl = (*lits)[s.cur_cmnt_and_lit.cur_lit].clone(); let ltrl = (*(*lits).get(s.cur_cmnt_and_lit.cur_lit)).clone();
if ltrl.pos > pos { return None; } if ltrl.pos > pos { return None; }
s.cur_cmnt_and_lit.cur_lit += 1u; s.cur_cmnt_and_lit.cur_lit += 1u;
if ltrl.pos == pos { return Some(ltrl); } if ltrl.pos == pos { return Some(ltrl); }
@ -2335,7 +2356,7 @@ pub fn print_comment(s: &mut State,
comments::Mixed => { comments::Mixed => {
assert_eq!(cmnt.lines.len(), 1u); assert_eq!(cmnt.lines.len(), 1u);
try!(zerobreak(&mut s.s)); try!(zerobreak(&mut s.s));
try!(word(&mut s.s, cmnt.lines[0])); try!(word(&mut s.s, *cmnt.lines.get(0)));
try!(zerobreak(&mut s.s)); try!(zerobreak(&mut s.s));
} }
comments::Isolated => { comments::Isolated => {
@ -2352,7 +2373,7 @@ pub fn print_comment(s: &mut State,
comments::Trailing => { comments::Trailing => {
try!(word(&mut s.s, " ")); try!(word(&mut s.s, " "));
if cmnt.lines.len() == 1u { if cmnt.lines.len() == 1u {
try!(word(&mut s.s, cmnt.lines[0])); try!(word(&mut s.s, *cmnt.lines.get(0)));
try!(hardbreak(&mut s.s)); try!(hardbreak(&mut s.s));
} else { } else {
try!(ibox(s, 0u)); try!(ibox(s, 0u));
@ -2414,7 +2435,7 @@ pub fn next_comment(s: &mut State) -> Option<comments::Comment> {
match s.comments { match s.comments {
Some(ref cmnts) => { Some(ref cmnts) => {
if s.cur_cmnt_and_lit.cur_cmnt < cmnts.len() { if s.cur_cmnt_and_lit.cur_cmnt < cmnts.len() {
Some(cmnts[s.cur_cmnt_and_lit.cur_cmnt].clone()) Some((*cmnts.get(s.cur_cmnt_and_lit.cur_cmnt)).clone())
} else { } else {
None None
} }
@ -2535,6 +2556,8 @@ mod test {
use codemap; use codemap;
use parse::token; use parse::token;
use std::vec_ng::Vec;
#[test] #[test]
fn test_fun_to_str() { fn test_fun_to_str() {
let abba_ident = token::str_to_ident("abba"); let abba_ident = token::str_to_ident("abba");

View file

@ -21,6 +21,7 @@ use std::cmp::Equiv;
use std::fmt; use std::fmt;
use std::hash::Hash; use std::hash::Hash;
use std::rc::Rc; use std::rc::Rc;
use std::vec_ng::Vec;
pub struct Interner<T> { pub struct Interner<T> {
priv map: RefCell<HashMap<T, Name>>, priv map: RefCell<HashMap<T, Name>>,
@ -68,7 +69,7 @@ impl<T:Eq + Hash + Freeze + Clone + 'static> Interner<T> {
pub fn get(&self, idx: Name) -> T { pub fn get(&self, idx: Name) -> T {
let vect = self.vect.borrow(); let vect = self.vect.borrow();
vect.get()[idx].clone() (*vect.get().get(idx as uint)).clone()
} }
pub fn len(&self) -> uint { pub fn len(&self) -> uint {
@ -189,21 +190,21 @@ impl StrInterner {
let new_idx = self.len() as Name; let new_idx = self.len() as Name;
// leave out of map to avoid colliding // leave out of map to avoid colliding
let mut vect = self.vect.borrow_mut(); let mut vect = self.vect.borrow_mut();
let existing = vect.get()[idx].clone(); let existing = (*vect.get().get(idx as uint)).clone();
vect.get().push(existing); vect.get().push(existing);
new_idx new_idx
} }
pub fn get(&self, idx: Name) -> RcStr { pub fn get(&self, idx: Name) -> RcStr {
let vect = self.vect.borrow(); let vect = self.vect.borrow();
vect.get()[idx].clone() (*vect.get().get(idx as uint)).clone()
} }
/// Returns this string with lifetime tied to the interner. Since /// Returns this string with lifetime tied to the interner. Since
/// strings may never be removed from the interner, this is safe. /// strings may never be removed from the interner, this is safe.
pub fn get_ref<'a>(&'a self, idx: Name) -> &'a str { pub fn get_ref<'a>(&'a self, idx: Name) -> &'a str {
let vect = self.vect.borrow(); let vect = self.vect.borrow();
let s: &str = vect.get()[idx].as_slice(); let s: &str = vect.get().get(idx as uint).as_slice();
unsafe { unsafe {
cast::transmute(s) cast::transmute(s)
} }

View file

@ -15,6 +15,8 @@ use parse::{new_parser_from_source_str};
use parse::parser::Parser; use parse::parser::Parser;
use parse::token; use parse::token;
use std::vec_ng::Vec;
// map a string to tts, using a made-up filename: return both the TokenTree's // map a string to tts, using a made-up filename: return both the TokenTree's
// and the ParseSess // and the ParseSess
pub fn string_to_tts_and_sess (source_str : ~str) -> (Vec<ast::TokenTree> , @ParseSess) { pub fn string_to_tts_and_sess (source_str : ~str) -> (Vec<ast::TokenTree> , @ParseSess) {

View file

@ -7,8 +7,10 @@
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed // option. This file may not be copied, modified, or distributed
// except according to those terms. // except according to those terms.
use std::mem; use std::mem;
use std::vec; use std::vec_ng::Vec;
use std::vec_ng;
/// A vector type optimized for cases where the size is almost always 0 or 1 /// A vector type optimized for cases where the size is almost always 0 or 1
pub enum SmallVector<T> { pub enum SmallVector<T> {
@ -73,7 +75,7 @@ impl<T> SmallVector<T> {
pub fn get<'a>(&'a self, idx: uint) -> &'a T { pub fn get<'a>(&'a self, idx: uint) -> &'a T {
match *self { match *self {
One(ref v) if idx == 0 => v, One(ref v) if idx == 0 => v,
Many(ref vs) => &vs[idx], Many(ref vs) => vs.get(idx),
_ => fail!("out of bounds access") _ => fail!("out of bounds access")
} }
} }
@ -104,7 +106,7 @@ impl<T> SmallVector<T> {
pub enum MoveItems<T> { pub enum MoveItems<T> {
priv ZeroIterator, priv ZeroIterator,
priv OneIterator(T), priv OneIterator(T),
priv ManyIterator(vec::MoveItems<T>), priv ManyIterator(vec_ng::MoveItems<T>),
} }
impl<T> Iterator<T> for MoveItems<T> { impl<T> Iterator<T> for MoveItems<T> {
@ -136,6 +138,8 @@ impl<T> Iterator<T> for MoveItems<T> {
mod test { mod test {
use super::*; use super::*;
use std::vec_ng::Vec;
#[test] #[test]
fn test_len() { fn test_len() {
let v: SmallVector<int> = SmallVector::zero(); let v: SmallVector<int> = SmallVector::zero();

View file

@ -637,7 +637,7 @@ pub fn walk_expr<E: Clone, V: Visitor<E>>(visitor: &mut V, expression: &Expr, en
visitor.visit_expr(subexpression, env.clone()) visitor.visit_expr(subexpression, env.clone())
} }
ExprVec(ref subexpressions, _) => { ExprVec(ref subexpressions, _) => {
walk_exprs(visitor, *subexpressions, env.clone()) walk_exprs(visitor, subexpressions.as_slice(), env.clone())
} }
ExprRepeat(element, count, _) => { ExprRepeat(element, count, _) => {
visitor.visit_expr(element, env.clone()); visitor.visit_expr(element, env.clone());
@ -662,7 +662,7 @@ pub fn walk_expr<E: Clone, V: Visitor<E>>(visitor: &mut V, expression: &Expr, en
visitor.visit_expr(callee_expression, env.clone()) visitor.visit_expr(callee_expression, env.clone())
} }
ExprMethodCall(_, ref types, ref arguments) => { ExprMethodCall(_, ref types, ref arguments) => {
walk_exprs(visitor, *arguments, env.clone()); walk_exprs(visitor, arguments.as_slice(), env.clone());
for &typ in types.iter() { for &typ in types.iter() {
visitor.visit_ty(typ, env.clone()) visitor.visit_ty(typ, env.clone())
} }