libsyntax: uint types to usize
parent 89c4e3792d
commit a32249d447

29 changed files with 165 additions and 165 deletions
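The rename is mechanical: each `uint` in libsyntax becomes `usize` (a handful of related `int` uses become `isize` or a concrete type such as `i32`), and literal suffixes move to the `us` spelling where one is needed. A minimal sketch of the pattern on a hypothetical stand-in type, not code from this commit:

    #[derive(Copy, Clone)]
    enum Flag { A, B }

    impl Flag {
        // old spelling before this commit: pub fn index(&self) -> uint { *self as uint }
        pub fn index(&self) -> usize {
            *self as usize
        }
    }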
@@ -105,8 +105,8 @@ pub fn all_names() -> Vec<&'static str> {
 impl Abi {
 #[inline]
-pub fn index(&self) -> uint {
-*self as uint
+pub fn index(&self) -> usize {
+*self as usize
 }

 #[inline]

@@ -152,7 +152,7 @@ impl PartialEq for Ident {

 /// A SyntaxContext represents a chain of macro-expandings
 /// and renamings. Each macro expansion corresponds to
-/// a fresh uint
+/// a fresh usize

 // I'm representing this syntax context as an index into
 // a table, in order to work around a compiler bug

@@ -181,9 +181,9 @@ impl Name {
 }
 }

-pub fn uint(&self) -> uint {
+pub fn uint(&self) -> usize {
 let Name(nm) = *self;
-nm as uint
+nm as usize
 }

 pub fn ident(&self) -> Ident {

@@ -740,7 +740,7 @@ pub enum Expr_ {
 ExprAssign(P<Expr>, P<Expr>),
 ExprAssignOp(BinOp, P<Expr>, P<Expr>),
 ExprField(P<Expr>, SpannedIdent),
-ExprTupField(P<Expr>, Spanned<uint>),
+ExprTupField(P<Expr>, Spanned<usize>),
 ExprIndex(P<Expr>, P<Expr>),
 ExprRange(Option<P<Expr>>, Option<P<Expr>>),

@@ -839,7 +839,7 @@ pub struct SequenceRepetition {
 /// Whether the sequence can be repeated zero (*), or one or more times (+)
 pub op: KleeneOp,
 /// The number of `MatchNt`s that appear in the sequence (and subsequences)
-pub num_captures: uint,
+pub num_captures: usize,
 }

 /// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)

@@ -878,7 +878,7 @@ pub enum TokenTree {
 }

 impl TokenTree {
-pub fn len(&self) -> uint {
+pub fn len(&self) -> usize {
 match *self {
 TtToken(_, token::DocComment(_)) => 2,
 TtToken(_, token::SpecialVarNt(..)) => 2,

@@ -893,7 +893,7 @@ impl TokenTree {
 }
 }

-pub fn get_tt(&self, index: uint) -> TokenTree {
+pub fn get_tt(&self, index: usize) -> TokenTree {
 match (self, index) {
 (&TtToken(sp, token::DocComment(_)), 0) => {
 TtToken(sp, token::Pound)

@@ -963,7 +963,7 @@ pub enum Mac_ {
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)]
 pub enum StrStyle {
 CookedStr,
-RawStr(uint)
+RawStr(usize)
 }

 pub type Lit = Spanned<Lit_>;

@@ -992,7 +992,7 @@ pub enum LitIntType {
 }

 impl LitIntType {
-pub fn suffix_len(&self) -> uint {
+pub fn suffix_len(&self) -> usize {
 match *self {
 UnsuffixedIntLit(_) => 0,
 SignedIntLit(s, _) => s.suffix_len(),

@@ -1113,7 +1113,7 @@ impl fmt::String for IntTy {
 }

 impl IntTy {
-pub fn suffix_len(&self) -> uint {
+pub fn suffix_len(&self) -> usize {
 match *self {
 TyIs(true) /* i */ => 1,
 TyIs(false) /* is */ | TyI8 => 2,

@@ -1146,7 +1146,7 @@ impl PartialEq for UintTy {
 }

 impl UintTy {
-pub fn suffix_len(&self) -> uint {
+pub fn suffix_len(&self) -> usize {
 match *self {
 TyUs(true) /* u */ => 1,
 TyUs(false) /* us */ | TyU8 => 2,

@@ -1186,7 +1186,7 @@ impl fmt::String for FloatTy {
 }

 impl FloatTy {
-pub fn suffix_len(&self) -> uint {
+pub fn suffix_len(&self) -> usize {
 match *self {
 TyF32 | TyF64 => 3, // add F128 handling here
 }

@@ -1274,7 +1274,7 @@ pub enum Ty_ {
 TyPtr(MutTy),
 /// A reference (`&'a T` or `&'a mut T`)
 TyRptr(Option<Lifetime>, MutTy),
-/// A bare function (e.g. `fn(uint) -> bool`)
+/// A bare function (e.g. `fn(usize) -> bool`)
 TyBareFn(P<BareFnTy>),
 /// A tuple (`(A, B, C, D,...)`)
 TyTup(Vec<P<Ty>> ),

@@ -1566,7 +1566,7 @@ pub enum AttrStyle {
 }

 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)]
-pub struct AttrId(pub uint);
+pub struct AttrId(pub usize);

 /// Doc-comments are promoted to attributes that have is_sugared_doc = true
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)]
@@ -264,12 +264,12 @@ pub struct Map<'ast> {
 }

 impl<'ast> Map<'ast> {
-fn entry_count(&self) -> uint {
+fn entry_count(&self) -> usize {
 self.map.borrow().len()
 }

 fn find_entry(&self, id: NodeId) -> Option<MapEntry<'ast>> {
-self.map.borrow().get(id as uint).map(|e| *e)
+self.map.borrow().get(id as usize).map(|e| *e)
 }

 pub fn krate(&self) -> &'ast Crate {

@@ -652,7 +652,7 @@ impl<'a, 'ast> Iterator for NodesMatchingSuffix<'a, 'ast> {
 fn next(&mut self) -> Option<NodeId> {
 loop {
 let idx = self.idx;
-if idx as uint >= self.map.entry_count() {
+if idx as usize >= self.map.entry_count() {
 return None;
 }
 self.idx += 1;

@@ -744,10 +744,10 @@ impl<'ast> NodeCollector<'ast> {
 fn insert_entry(&mut self, id: NodeId, entry: MapEntry<'ast>) {
 debug!("ast_map: {:?} => {:?}", id, entry);
 let len = self.map.len();
-if id as uint >= len {
-self.map.extend(repeat(NotPresent).take(id as uint - len + 1));
+if id as usize >= len {
+self.map.extend(repeat(NotPresent).take(id as usize - len + 1));
 }
-self.map[id as uint] = entry;
+self.map[id as usize] = entry;
 }

 fn insert(&mut self, id: NodeId, node: Node<'ast>) {

@@ -156,7 +156,7 @@ pub fn int_ty_max(t: IntTy) -> u64 {
 }

 /// Get a string representation of an unsigned int type, with its value.
-/// We want to avoid "42uint" in favor of "42u"
+/// We want to avoid "42u" in favor of "42us". "42uint" is right out.
 pub fn uint_ty_to_string(t: UintTy, val: Option<u64>) -> String {
 let s = match t {
 TyUs(true) if val.is_some() => "u",

@@ -319,7 +319,7 @@ pub fn struct_field_visibility(field: ast::StructField) -> Visibility {
 }

 /// Maps a binary operator to its precedence
-pub fn operator_prec(op: ast::BinOp) -> uint {
+pub fn operator_prec(op: ast::BinOp) -> usize {
 match op {
 // 'as' sits here with 12
 BiMul | BiDiv | BiRem => 11u,

@@ -337,7 +337,7 @@ pub fn operator_prec(op: ast::BinOp) -> uint {
 /// Precedence of the `as` operator, which is a binary operator
 /// not appearing in the prior table.
 #[allow(non_upper_case_globals)]
-pub static as_prec: uint = 12u;
+pub static as_prec: usize = 12us;

 pub fn empty_generics() -> Generics {
 Generics {
@@ -170,7 +170,7 @@ pub fn mk_word_item(name: InternedString) -> P<MetaItem> {
 P(dummy_spanned(MetaWord(name)))
 }

-thread_local! { static NEXT_ATTR_ID: Cell<uint> = Cell::new(0) }
+thread_local! { static NEXT_ATTR_ID: Cell<usize> = Cell::new(0) }

 pub fn mk_attr_id() -> AttrId {
 let id = NEXT_ATTR_ID.with(|slot| {

@@ -30,8 +30,8 @@ use libc::c_uint;
 use serialize::{Encodable, Decodable, Encoder, Decoder};

 pub trait Pos {
-fn from_uint(n: uint) -> Self;
-fn to_uint(&self) -> uint;
+fn from_uint(n: usize) -> Self;
+fn to_uint(&self) -> usize;
 }

 /// A byte offset. Keep this small (currently 32-bits), as AST contains

@@ -43,14 +43,14 @@ pub struct BytePos(pub u32);
 /// is not equivalent to a character offset. The CodeMap will convert BytePos
 /// values to CharPos values as necessary.
 #[derive(Copy, PartialEq, Hash, PartialOrd, Show)]
-pub struct CharPos(pub uint);
+pub struct CharPos(pub usize);

 // FIXME: Lots of boilerplate in these impls, but so far my attempts to fix
 // have been unsuccessful

 impl Pos for BytePos {
-fn from_uint(n: uint) -> BytePos { BytePos(n as u32) }
-fn to_uint(&self) -> uint { let BytePos(n) = *self; n as uint }
+fn from_uint(n: usize) -> BytePos { BytePos(n as u32) }
+fn to_uint(&self) -> usize { let BytePos(n) = *self; n as usize }
 }

 impl Add for BytePos {

@@ -70,8 +70,8 @@ impl Sub for BytePos {
 }

 impl Pos for CharPos {
-fn from_uint(n: uint) -> CharPos { CharPos(n) }
-fn to_uint(&self) -> uint { let CharPos(n) = *self; n }
+fn from_uint(n: usize) -> CharPos { CharPos(n) }
+fn to_uint(&self) -> usize { let CharPos(n) = *self; n }
 }

 impl Add for CharPos {

@@ -173,7 +173,7 @@ pub struct Loc {
 /// Information about the original source
 pub file: Rc<FileMap>,
 /// The (1-based) line number
-pub line: uint,
+pub line: usize,
 /// The (0-based) column offset
 pub col: CharPos
 }

@@ -183,13 +183,13 @@ pub struct Loc {
 // perhaps they should just be removed.
 pub struct LocWithOpt {
 pub filename: FileName,
-pub line: uint,
+pub line: usize,
 pub col: CharPos,
 pub file: Option<Rc<FileMap>>,
 }

 // used to be structural records. Better names, anyone?
-pub struct FileMapAndLine { pub fm: Rc<FileMap>, pub line: uint }
+pub struct FileMapAndLine { pub fm: Rc<FileMap>, pub line: usize }
 pub struct FileMapAndBytePos { pub fm: Rc<FileMap>, pub pos: BytePos }

 /// The syntax with which a macro was invoked.

@@ -258,7 +258,7 @@ pub type FileName = String;

 pub struct FileLines {
 pub file: Rc<FileMap>,
-pub lines: Vec<uint>
+pub lines: Vec<usize>
 }

 /// Identifies an offset of a multi-byte character in a FileMap

@@ -267,7 +267,7 @@ pub struct MultiByteChar {
 /// The absolute offset of the character in the CodeMap
 pub pos: BytePos,
 /// The number of bytes, >=2
-pub bytes: uint,
+pub bytes: usize,
 }

 /// A single source in the CodeMap

@@ -306,7 +306,7 @@ impl FileMap {

 /// get a line from the list of pre-computed line-beginnings
 ///
-pub fn get_line(&self, line_number: uint) -> Option<String> {
+pub fn get_line(&self, line_number: usize) -> Option<String> {
 let lines = self.lines.borrow();
 lines.get(line_number).map(|&line| {
 let begin: BytePos = line - self.start_pos;

@@ -319,7 +319,7 @@ impl FileMap {
 })
 }

-pub fn record_multibyte_char(&self, pos: BytePos, bytes: uint) {
+pub fn record_multibyte_char(&self, pos: BytePos, bytes: usize) {
 assert!(bytes >=2 && bytes <= 4);
 let mbc = MultiByteChar {
 pos: pos,

@@ -430,7 +430,7 @@ impl CodeMap {
 let lo = self.lookup_char_pos(sp.lo);
 let hi = self.lookup_char_pos(sp.hi);
 let mut lines = Vec::new();
-for i in range(lo.line - 1u, hi.line as uint) {
+for i in range(lo.line - 1u, hi.line as usize) {
 lines.push(i);
 };
 FileLines {file: lo.file, lines: lines}

@@ -494,7 +494,7 @@ impl CodeMap {
 CharPos(bpos.to_uint() - map.start_pos.to_uint() - total_extra_bytes)
 }

-fn lookup_filemap_idx(&self, pos: BytePos) -> uint {
+fn lookup_filemap_idx(&self, pos: BytePos) -> usize {
 let files = self.files.borrow();
 let files = &*files;
 let len = files.len();

@@ -579,7 +579,7 @@ impl CodeMap {
 {
 match id {
 NO_EXPANSION => f(None),
-ExpnId(i) => f(Some(&(*self.expansions.borrow())[i as uint]))
+ExpnId(i) => f(Some(&(*self.expansions.borrow())[i as usize]))
 }
 }
@@ -26,7 +26,7 @@ use term::WriterWrapper;
 use term;

 /// maximum number of lines we will print for each error; arbitrary.
-static MAX_LINES: uint = 6u;
+static MAX_LINES: usize = 6u;

 #[derive(Clone, Copy)]
 pub enum RenderSpan {

@@ -137,7 +137,7 @@ impl SpanHandler {
 /// (fatal, bug, unimpl) may cause immediate exit,
 /// others log errors for later reporting.
 pub struct Handler {
-err_count: Cell<uint>,
+err_count: Cell<usize>,
 emit: RefCell<Box<Emitter + Send>>,
 }

@@ -153,7 +153,7 @@ impl Handler {
 pub fn bump_err_count(&self) {
 self.err_count.set(self.err_count.get() + 1u);
 }
-pub fn err_count(&self) -> uint {
+pub fn err_count(&self) -> usize {
 self.err_count.get()
 }
 pub fn has_errors(&self) -> bool {

@@ -548,7 +548,7 @@ pub struct ExtCtxt<'a> {
 pub exported_macros: Vec<ast::MacroDef>,

 pub syntax_env: SyntaxEnv,
-pub recursion_count: uint,
+pub recursion_count: usize,
 }

 impl<'a> ExtCtxt<'a> {
@@ -116,7 +116,7 @@ pub trait AstBuilder {
 fn expr_mut_addr_of(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr>;
 fn expr_field_access(&self, span: Span, expr: P<ast::Expr>, ident: ast::Ident) -> P<ast::Expr>;
 fn expr_tup_field_access(&self, sp: Span, expr: P<ast::Expr>,
-idx: uint) -> P<ast::Expr>;
+idx: usize) -> P<ast::Expr>;
 fn expr_call(&self, span: Span, expr: P<ast::Expr>, args: Vec<P<ast::Expr>>) -> P<ast::Expr>;
 fn expr_call_ident(&self, span: Span, id: ast::Ident, args: Vec<P<ast::Expr>>) -> P<ast::Expr>;
 fn expr_call_global(&self, sp: Span, fn_path: Vec<ast::Ident>,

@@ -134,7 +134,7 @@ pub trait AstBuilder {

 fn expr_lit(&self, sp: Span, lit: ast::Lit_) -> P<ast::Expr>;

-fn expr_uint(&self, span: Span, i: uint) -> P<ast::Expr>;
+fn expr_uint(&self, span: Span, i: usize) -> P<ast::Expr>;
 fn expr_int(&self, sp: Span, i: int) -> P<ast::Expr>;
 fn expr_u8(&self, sp: Span, u: u8) -> P<ast::Expr>;
 fn expr_bool(&self, sp: Span, value: bool) -> P<ast::Expr>;

@@ -587,7 +587,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
 let id = Spanned { node: ident, span: field_span };
 self.expr(sp, ast::ExprField(expr, id))
 }
-fn expr_tup_field_access(&self, sp: Span, expr: P<ast::Expr>, idx: uint) -> P<ast::Expr> {
+fn expr_tup_field_access(&self, sp: Span, expr: P<ast::Expr>, idx: usize) -> P<ast::Expr> {
 let field_span = Span {
 lo: sp.lo - Pos::from_uint(idx.to_string().len()),
 hi: sp.hi,

@@ -641,7 +641,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
 fn expr_lit(&self, sp: Span, lit: ast::Lit_) -> P<ast::Expr> {
 self.expr(sp, ast::ExprLit(P(respan(sp, lit))))
 }
-fn expr_uint(&self, span: Span, i: uint) -> P<ast::Expr> {
+fn expr_uint(&self, span: Span, i: usize) -> P<ast::Expr> {
 self.expr_lit(span, ast::LitInt(i as u64, ast::UnsignedIntLit(ast::TyUs(false))))
 }
 fn expr_int(&self, sp: Span, i: int) -> P<ast::Expr> {

@@ -179,14 +179,14 @@ fn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span,

 /// Create a decoder for a single enum variant/struct:
 /// - `outer_pat_path` is the path to this enum variant/struct
-/// - `getarg` should retrieve the `uint`-th field with name `@str`.
+/// - `getarg` should retrieve the `usize`-th field with name `@str`.
 fn decode_static_fields<F>(cx: &mut ExtCtxt,
 trait_span: Span,
 outer_pat_path: ast::Path,
 fields: &StaticFields,
 mut getarg: F)
 -> P<Expr> where
-F: FnMut(&mut ExtCtxt, Span, InternedString, uint) -> P<Expr>,
+F: FnMut(&mut ExtCtxt, Span, InternedString, usize) -> P<Expr>,
 {
 match *fields {
 Unnamed(ref fields) => {

@@ -16,7 +16,7 @@
 //!
 //! ```ignore
 //! #[derive(Encodable, Decodable)]
-//! struct Node { id: uint }
+//! struct Node { id: usize }
 //! ```
 //!
 //! would generate two implementations like:

@@ -294,7 +294,7 @@ pub enum SubstructureFields<'a> {
 /// Matching variants of the enum: variant index, ast::Variant,
 /// fields: the field name is only non-`None` in the case of a struct
 /// variant.
-EnumMatching(uint, &'a ast::Variant, Vec<FieldInfo>),
+EnumMatching(usize, &'a ast::Variant, Vec<FieldInfo>),

 /// Non-matching variants of the enum, but with all state hidden from
 /// the consequent code. The first component holds `Ident`s for all of

@@ -915,7 +915,7 @@ impl<'a> MethodDef<'a> {
 .collect::<Vec<ast::Ident>>();

 // The `vi_idents` will be bound, solely in the catch-all, to
-// a series of let statements mapping each self_arg to a uint
+// a series of let statements mapping each self_arg to a usize
 // corresponding to its variant index.
 let vi_idents: Vec<ast::Ident> = self_arg_names.iter()
 .map(|name| { let vi_suffix = format!("{}_vi", &name[]);

@@ -1039,7 +1039,7 @@ impl<'a> MethodDef<'a> {
 }).collect();

 // Build a series of let statements mapping each self_arg
-// to a uint corresponding to its variant index.
+// to a usize corresponding to its variant index.
 // i.e. for `enum E<T> { A, B(1), C(T, T) }`, and a deriving
 // with three Self args, builds three statements:
 //
@@ -1311,7 +1311,7 @@ fn new_span(cx: &ExtCtxt, sp: Span) -> Span {
 pub struct ExpansionConfig {
 pub crate_name: String,
 pub enable_quotes: bool,
-pub recursion_limit: uint,
+pub recursion_limit: usize,
 }

 impl ExpansionConfig {

@@ -1595,7 +1595,7 @@ mod test {
 // in principle, you might want to control this boolean on a per-varref basis,
 // but that would make things even harder to understand, and might not be
 // necessary for thorough testing.
-type RenamingTest = (&'static str, Vec<Vec<uint>>, bool);
+type RenamingTest = (&'static str, Vec<Vec<usize>>, bool);

 #[test]
 fn automatic_renaming () {

@@ -1749,7 +1749,7 @@ mod test {
 }

 // run one of the renaming tests
-fn run_renaming_test(t: &RenamingTest, test_idx: uint) {
+fn run_renaming_test(t: &RenamingTest, test_idx: usize) {
 let invalid_name = token::special_idents::invalid.name;
 let (teststr, bound_connections, bound_ident_check) = match *t {
 (ref str,ref conns, bic) => (str.to_string(), conns.clone(), bic)

@@ -31,7 +31,7 @@ enum ArgumentType {
 }

 enum Position {
-Exact(uint),
+Exact(usize),
 Named(String),
 }

@@ -61,11 +61,11 @@ struct Context<'a, 'b:'a> {
 /// Stays `true` if all formatting parameters are default (as in "{}{}").
 all_pieces_simple: bool,

-name_positions: HashMap<String, uint>,
+name_positions: HashMap<String, usize>,

 /// Updated as arguments are consumed or methods are entered
-nest_level: uint,
-next_arg: uint,
+nest_level: usize,
+next_arg: usize,
 }

 /// Parses the arguments from the given list of tokens, returning None

@@ -187,7 +187,7 @@ fn resolve_internal(id: Ident,
 }

 let resolved = {
-let result = (*table.table.borrow())[id.ctxt as uint];
+let result = (*table.table.borrow())[id.ctxt as usize];
 match result {
 EmptyCtxt => id.name,
 // ignore marks here:

@@ -231,7 +231,7 @@ fn marksof_internal(ctxt: SyntaxContext,
 let mut result = Vec::new();
 let mut loopvar = ctxt;
 loop {
-let table_entry = (*table.table.borrow())[loopvar as uint];
+let table_entry = (*table.table.borrow())[loopvar as usize];
 match table_entry {
 EmptyCtxt => {
 return result;

@@ -258,7 +258,7 @@ fn marksof_internal(ctxt: SyntaxContext,
 /// FAILS when outside is not a mark.
 pub fn outer_mark(ctxt: SyntaxContext) -> Mrk {
 with_sctable(|sctable| {
-match (*sctable.table.borrow())[ctxt as uint] {
+match (*sctable.table.borrow())[ctxt as usize] {
 Mark(mrk, _) => mrk,
 _ => panic!("can't retrieve outer mark when outside is not a mark")
 }

@@ -330,7 +330,7 @@ mod tests {
 let mut result = Vec::new();
 loop {
 let table = table.table.borrow();
-match (*table)[sc as uint] {
+match (*table)[sc as usize] {
 EmptyCtxt => {return result;},
 Mark(mrk,tail) => {
 result.push(M(mrk));
@@ -110,14 +110,14 @@ enum TokenTreeOrTokenTreeVec {
 }

 impl TokenTreeOrTokenTreeVec {
-fn len(&self) -> uint {
+fn len(&self) -> usize {
 match self {
 &TtSeq(ref v) => v.len(),
 &Tt(ref tt) => tt.len(),
 }
 }

-fn get_tt(&self, index: uint) -> TokenTree {
+fn get_tt(&self, index: usize) -> TokenTree {
 match self {
 &TtSeq(ref v) => v[index].clone(),
 &Tt(ref tt) => tt.get_tt(index),

@@ -129,7 +129,7 @@ impl TokenTreeOrTokenTreeVec {
 #[derive(Clone)]
 struct MatcherTtFrame {
 elts: TokenTreeOrTokenTreeVec,
-idx: uint,
+idx: usize,
 }

 #[derive(Clone)]

@@ -137,16 +137,16 @@ pub struct MatcherPos {
 stack: Vec<MatcherTtFrame>,
 top_elts: TokenTreeOrTokenTreeVec,
 sep: Option<Token>,
-idx: uint,
+idx: usize,
 up: Option<Box<MatcherPos>>,
 matches: Vec<Vec<Rc<NamedMatch>>>,
-match_lo: uint,
-match_cur: uint,
-match_hi: uint,
+match_lo: usize,
+match_cur: usize,
+match_hi: usize,
 sp_lo: BytePos,
 }

-pub fn count_names(ms: &[TokenTree]) -> uint {
+pub fn count_names(ms: &[TokenTree]) -> usize {
 ms.iter().fold(0, |count, elt| {
 count + match elt {
 &TtSequence(_, ref seq) => {

@@ -206,7 +206,7 @@ pub enum NamedMatch {
 pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
 -> HashMap<Ident, Rc<NamedMatch>> {
 fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>],
-ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut uint) {
+ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut usize) {
 match m {
 &TtSequence(_, ref seq) => {
 for next_m in seq.tts.iter() {

@@ -27,7 +27,7 @@ use std::collections::HashMap;
 #[derive(Clone)]
 struct TtFrame {
 forest: TokenTree,
-idx: uint,
+idx: usize,
 dotdotdoted: bool,
 sep: Option<Token>,
 }

@@ -43,8 +43,8 @@ pub struct TtReader<'a> {

 // Some => return imported_from as the next token
 crate_name_next: Option<Span>,
-repeat_idx: Vec<uint>,
-repeat_len: Vec<uint>,
+repeat_idx: Vec<usize>,
+repeat_len: Vec<usize>,
 /* cached: */
 pub cur_tok: Token,
 pub cur_span: Span,

@@ -124,7 +124,7 @@ fn lookup_cur_matched(r: &TtReader, name: Ident) -> Option<Rc<NamedMatch>> {
 #[derive(Clone)]
 enum LockstepIterSize {
 LisUnconstrained,
-LisConstraint(uint, Ident),
+LisConstraint(usize, Ident),
 LisContradiction(String),
 }

@@ -174,7 +174,7 @@ pub trait Folder : Sized {
 noop_fold_ident(i, self)
 }

-fn fold_uint(&mut self, i: uint) -> uint {
+fn fold_uint(&mut self, i: usize) -> usize {
 noop_fold_uint(i, self)
 }

@@ -505,7 +505,7 @@ pub fn noop_fold_ident<T: Folder>(i: Ident, _: &mut T) -> Ident {
 i
 }

-pub fn noop_fold_uint<T: Folder>(i: uint, _: &mut T) -> uint {
+pub fn noop_fold_uint<T: Folder>(i: usize, _: &mut T) -> usize {
 i
 }
@@ -22,7 +22,7 @@ use print::pprust;
 use std::io;
 use std::str;
 use std::string::String;
-use std::uint;
+use std::usize;

 #[derive(Clone, Copy, PartialEq)]
 pub enum CommentStyle {

@@ -87,7 +87,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {

 /// remove a "[ \t]*\*" block from each line, if possible
 fn horizontal_trim(lines: Vec<String> ) -> Vec<String> {
-let mut i = uint::MAX;
+let mut i = usize::MAX;
 let mut can_trim = true;
 let mut first = true;
 for line in lines.iter() {

@@ -206,10 +206,10 @@ fn read_line_comments(rdr: &mut StringReader, code_to_the_left: bool,
 /// Returns None if the first col chars of s contain a non-whitespace char.
 /// Otherwise returns Some(k) where k is first char offset after that leading
 /// whitespace. Note k may be outside bounds of s.
-fn all_whitespace(s: &str, col: CharPos) -> Option<uint> {
+fn all_whitespace(s: &str, col: CharPos) -> Option<usize> {
 let len = s.len();
 let mut col = col.to_uint();
-let mut cursor: uint = 0;
+let mut cursor: usize = 0;
 while col > 0 && cursor < len {
 let r: str::CharRange = s.char_range_at(cursor);
 if !r.ch.is_whitespace() {

@@ -295,7 +295,7 @@ impl<'a> StringReader<'a> {
 return s.into_cow();

 fn translate_crlf_(rdr: &StringReader, start: BytePos,
-s: &str, errmsg: &str, mut i: uint) -> String {
+s: &str, errmsg: &str, mut i: usize) -> String {
 let mut buf = String::with_capacity(s.len());
 let mut j = 0;
 while i < s.len() {

@@ -645,7 +645,7 @@ impl<'a> StringReader<'a> {

 /// Scan through any digits (base `radix`) or underscores, and return how
 /// many digits there were.
-fn scan_digits(&mut self, radix: uint) -> uint {
+fn scan_digits(&mut self, radix: usize) -> usize {
 let mut len = 0u;
 loop {
 let c = self.curr;

@@ -724,7 +724,7 @@ impl<'a> StringReader<'a> {
 /// Scan over `n_digits` hex digits, stopping at `delim`, reporting an
 /// error if too many or too few digits are encountered.
 fn scan_hex_digits(&mut self,
-n_digits: uint,
+n_digits: usize,
 delim: char,
 below_0x7f_only: bool)
 -> bool {

@@ -877,7 +877,7 @@ impl<'a> StringReader<'a> {
 fn scan_unicode_escape(&mut self, delim: char) -> bool {
 self.bump(); // past the {
 let start_bpos = self.last_pos;
-let mut count: uint = 0;
+let mut count = 0us;
 let mut accum_int = 0;

 while !self.curr_is('}') && count <= 6 {

@@ -935,7 +935,7 @@ impl<'a> StringReader<'a> {

 /// Check that a base is valid for a floating literal, emitting a nice
 /// error if it isn't.
-fn check_float_base(&mut self, start_bpos: BytePos, last_bpos: BytePos, base: uint) {
+fn check_float_base(&mut self, start_bpos: BytePos, last_bpos: BytePos, base: usize) {
 match base {
 16u => self.err_span_(start_bpos, last_bpos, "hexadecimal float literal is not \
 supported"),
@@ -374,7 +374,7 @@ pub fn maybe_aborted<T>(result: T, mut p: Parser) -> T {
 /// Rather than just accepting/rejecting a given literal, unescapes it as
 /// well. Can take any slice prefixed by a character escape. Returns the
 /// character and the number of characters consumed.
-pub fn char_lit(lit: &str) -> (char, int) {
+pub fn char_lit(lit: &str) -> (char, isize) {
 use std::{num, char};

 let mut chars = lit.chars();

@@ -401,19 +401,19 @@ pub fn char_lit(lit: &str) -> (char, int) {
 let msg = format!("lexer should have rejected a bad character escape {}", lit);
 let msg2 = &msg[];

-fn esc(len: uint, lit: &str) -> Option<(char, int)> {
+fn esc(len: usize, lit: &str) -> Option<(char, isize)> {
 num::from_str_radix(&lit[2..len], 16)
 .and_then(char::from_u32)
-.map(|x| (x, len as int))
+.map(|x| (x, len as isize))
 }

-let unicode_escape = |&: | -> Option<(char, int)>
+let unicode_escape = |&: | -> Option<(char, isize)>
 if lit.as_bytes()[2] == b'{' {
 let idx = lit.find('}').expect(msg2);
 let subslice = &lit[3..idx];
 num::from_str_radix(subslice, 16)
 .and_then(char::from_u32)
-.map(|x| (x, subslice.chars().count() as int + 4))
+.map(|x| (x, subslice.chars().count() as isize + 4))
 } else {
 esc(6, lit)
 };

@@ -437,7 +437,7 @@ pub fn str_lit(lit: &str) -> String {
 let error = |&: i| format!("lexer should have rejected {} at {}", lit, i);

 /// Eat everything up to a non-whitespace
-fn eat<'a>(it: &mut iter::Peekable<(uint, char), str::CharIndices<'a>>) {
+fn eat<'a>(it: &mut iter::Peekable<(usize, char), str::CharIndices<'a>>) {
 loop {
 match it.peek().map(|x| x.1) {
 Some(' ') | Some('\n') | Some('\r') | Some('\t') => {

@@ -568,7 +568,7 @@ pub fn float_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> a
 }

 /// Parse a string representing a byte literal into its final form. Similar to `char_lit`
-pub fn byte_lit(lit: &str) -> (u8, uint) {
+pub fn byte_lit(lit: &str) -> (u8, usize) {
 let err = |&: i| format!("lexer accepted invalid byte literal {} step {}", lit, i);

 if lit.len() == 1 {

@@ -606,7 +606,7 @@ pub fn binary_lit(lit: &str) -> Rc<Vec<u8>> {
 let error = |&: i| format!("lexer should have rejected {} at {}", lit, i);

 /// Eat everything up to a non-whitespace
-fn eat<'a, I: Iterator<Item=(uint, u8)>>(it: &mut iter::Peekable<(uint, u8), I>) {
+fn eat<'a, I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<(usize, u8), I>) {
 loop {
 match it.peek().map(|x| x.1) {
 Some(b' ') | Some(b'\n') | Some(b'\r') | Some(b'\t') => {

@@ -1161,11 +1161,11 @@ mod test {

 #[test] fn span_of_self_arg_pat_idents_are_correct() {

-let srcs = ["impl z { fn a (&self, &myarg: int) {} }",
-"impl z { fn a (&mut self, &myarg: int) {} }",
-"impl z { fn a (&'a self, &myarg: int) {} }",
-"impl z { fn a (self, &myarg: int) {} }",
-"impl z { fn a (self: Foo, &myarg: int) {} }",
+let srcs = ["impl z { fn a (&self, &myarg: i32) {} }",
+"impl z { fn a (&mut self, &myarg: i32) {} }",
+"impl z { fn a (&'a self, &myarg: i32) {} }",
+"impl z { fn a (self, &myarg: i32) {} }",
+"impl z { fn a (self: Foo, &myarg: i32) {} }",
 ];

 for &src in srcs.iter() {

@@ -62,7 +62,7 @@ impl<'a> ParserObsoleteMethods for parser::Parser<'a> {
 "use a `move ||` expression instead",
 ),
 ObsoleteSyntax::ClosureType => (
-"`|uint| -> bool` closure type syntax",
+"`|usize| -> bool` closure type syntax",
 "use unboxed closures instead, no type annotation needed"
 ),
 ObsoleteSyntax::Sized => (
@@ -292,9 +292,9 @@ pub struct Parser<'a> {
 pub buffer: [TokenAndSpan; 4],
 pub buffer_start: int,
 pub buffer_end: int,
-pub tokens_consumed: uint,
+pub tokens_consumed: usize,
 pub restrictions: Restrictions,
-pub quote_depth: uint, // not (yet) related to the quasiquoter
+pub quote_depth: usize, // not (yet) related to the quasiquoter
 pub reader: Box<Reader+'a>,
 pub interner: Rc<token::IdentInterner>,
 /// The set of seen errors about obsolete syntax. Used to suppress

@@ -932,8 +932,8 @@ impl<'a> Parser<'a> {
 self.reader.real_token()
 } else {
 // Avoid token copies with `replace`.
-let buffer_start = self.buffer_start as uint;
-let next_index = (buffer_start + 1) & 3 as uint;
+let buffer_start = self.buffer_start as usize;
+let next_index = (buffer_start + 1) & 3 as usize;
 self.buffer_start = next_index as int;

 let placeholder = TokenAndSpan {

@@ -972,15 +972,15 @@ impl<'a> Parser<'a> {
 }
 return (4 - self.buffer_start) + self.buffer_end;
 }
-pub fn look_ahead<R, F>(&mut self, distance: uint, f: F) -> R where
+pub fn look_ahead<R, F>(&mut self, distance: usize, f: F) -> R where
 F: FnOnce(&token::Token) -> R,
 {
 let dist = distance as int;
 while self.buffer_length() < dist {
-self.buffer[self.buffer_end as uint] = self.reader.real_token();
+self.buffer[self.buffer_end as usize] = self.reader.real_token();
 self.buffer_end = (self.buffer_end + 1) & 3;
 }
-f(&self.buffer[((self.buffer_start + dist - 1) & 3) as uint].tok)
+f(&self.buffer[((self.buffer_start + dist - 1) & 3) as usize].tok)
 }
 pub fn fatal(&mut self, m: &str) -> ! {
 self.sess.span_diagnostic.span_fatal(self.span, m)

@@ -2087,7 +2087,7 @@ impl<'a> Parser<'a> {
 ExprField(expr, ident)
 }

-pub fn mk_tup_field(&mut self, expr: P<Expr>, idx: codemap::Spanned<uint>) -> ast::Expr_ {
+pub fn mk_tup_field(&mut self, expr: P<Expr>, idx: codemap::Spanned<usize>) -> ast::Expr_ {
 ExprTupField(expr, idx)
 }

@@ -2485,7 +2485,7 @@ impl<'a> Parser<'a> {
 hi = self.span.hi;
 self.bump();

-let index = n.as_str().parse::<uint>();
+let index = n.as_str().parse::<usize>();
 match index {
 Some(n) => {
 let id = spanned(dot, hi, n);

@@ -2511,7 +2511,7 @@ impl<'a> Parser<'a> {
 };
 self.span_help(last_span,
 &format!("try parenthesizing the first index; e.g., `(foo.{}){}`",
-float.trunc() as uint,
+float.trunc() as usize,
 &float.fract().to_string()[1..])[]);
 }
 self.abort_if_errors();

@@ -2864,7 +2864,7 @@ impl<'a> Parser<'a> {
 }

 /// Parse an expression of binops of at least min_prec precedence
-pub fn parse_more_binops(&mut self, lhs: P<Expr>, min_prec: uint) -> P<Expr> {
+pub fn parse_more_binops(&mut self, lhs: P<Expr>, min_prec: usize) -> P<Expr> {
 if self.expr_is_complete(&*lhs) { return lhs; }

 // Prevent dynamic borrow errors later on by limiting the

@@ -83,9 +83,9 @@ pub enum Lit {
 Integer(ast::Name),
 Float(ast::Name),
 Str_(ast::Name),
-StrRaw(ast::Name, uint), /* raw str delimited by n hash symbols */
+StrRaw(ast::Name, usize), /* raw str delimited by n hash symbols */
 Binary(ast::Name),
-BinaryRaw(ast::Name, uint), /* raw binary str delimited by n hash symbols */
+BinaryRaw(ast::Name, usize), /* raw binary str delimited by n hash symbols */
 }

 impl Lit {

@@ -724,7 +724,7 @@ pub fn intern(s: &str) -> ast::Name {
 get_ident_interner().intern(s)
 }

-/// gensym's a new uint, using the current interner.
+/// gensym's a new usize, using the current interner.
 #[inline]
 pub fn gensym(s: &str) -> ast::Name {
 get_ident_interner().gensym(s)
@ -31,7 +31,7 @@
|
||||||
//!
|
//!
|
||||||
//! In particular you'll see a certain amount of churn related to INTEGER vs.
|
//! In particular you'll see a certain amount of churn related to INTEGER vs.
|
||||||
//! CARDINAL in the Mesa implementation. Mesa apparently interconverts the two
|
//! CARDINAL in the Mesa implementation. Mesa apparently interconverts the two
|
||||||
//! somewhat readily? In any case, I've used uint for indices-in-buffers and
|
//! somewhat readily? In any case, I've used usize for indices-in-buffers and
|
||||||
//! ints for character-sizes-and-indentation-offsets. This respects the need
|
//! ints for character-sizes-and-indentation-offsets. This respects the need
|
||||||
//! for ints to "go negative" while carrying a pending-calculation balance, and
|
//! for ints to "go negative" while carrying a pending-calculation balance, and
|
||||||
//! helps differentiate all the numbers flying around internally (slightly).
|
//! helps differentiate all the numbers flying around internally (slightly).
|
||||||
|
@ -123,9 +123,9 @@ pub fn tok_str(token: &Token) -> String {
|
||||||
|
|
||||||
pub fn buf_str(toks: &[Token],
|
pub fn buf_str(toks: &[Token],
|
||||||
szs: &[int],
|
szs: &[int],
|
||||||
left: uint,
|
left: usize,
|
||||||
right: uint,
|
right: usize,
|
||||||
lim: uint)
|
lim: usize)
|
||||||
-> String {
|
-> String {
|
||||||
let n = toks.len();
|
let n = toks.len();
|
||||||
assert_eq!(n, szs.len());
|
assert_eq!(n, szs.len());
|
||||||
|
@ -161,14 +161,14 @@ pub struct PrintStackElem {
|
||||||
|
|
||||||
static SIZE_INFINITY: int = 0xffff;
|
static SIZE_INFINITY: int = 0xffff;
|
||||||
|
|
||||||
pub fn mk_printer(out: Box<io::Writer+'static>, linewidth: uint) -> Printer {
|
pub fn mk_printer(out: Box<io::Writer+'static>, linewidth: usize) -> Printer {
|
||||||
// Yes 3, it makes the ring buffers big enough to never
|
// Yes 3, it makes the ring buffers big enough to never
|
||||||
// fall behind.
|
// fall behind.
|
||||||
let n: uint = 3 * linewidth;
|
let n: usize = 3 * linewidth;
|
||||||
debug!("mk_printer {}", linewidth);
|
debug!("mk_printer {}", linewidth);
|
||||||
let token: Vec<Token> = repeat(Token::Eof).take(n).collect();
|
let token: Vec<Token> = repeat(Token::Eof).take(n).collect();
|
||||||
let size: Vec<int> = repeat(0i).take(n).collect();
|
let size: Vec<int> = repeat(0i).take(n).collect();
|
||||||
let scan_stack: Vec<uint> = repeat(0u).take(n).collect();
|
let scan_stack: Vec<usize> = repeat(0us).take(n).collect();
|
||||||
Printer {
|
Printer {
|
||||||
out: out,
|
out: out,
|
||||||
buf_len: n,
|
buf_len: n,
|
||||||
|
@ -267,15 +267,15 @@ pub fn mk_printer(out: Box<io::Writer+'static>, linewidth: uint) -> Printer {
|
||||||
/// called 'print'.
|
/// called 'print'.
|
||||||
pub struct Printer {
|
pub struct Printer {
|
||||||
pub out: Box<io::Writer+'static>,
|
pub out: Box<io::Writer+'static>,
|
||||||
buf_len: uint,
|
buf_len: usize,
|
||||||
/// Width of lines we're constrained to
|
/// Width of lines we're constrained to
|
||||||
margin: int,
|
margin: int,
|
||||||
/// Number of spaces left on line
|
/// Number of spaces left on line
|
||||||
space: int,
|
space: int,
|
||||||
/// Index of left side of input stream
|
/// Index of left side of input stream
|
||||||
left: uint,
|
left: usize,
|
||||||
/// Index of right side of input stream
|
/// Index of right side of input stream
|
||||||
right: uint,
|
right: usize,
|
||||||
/// Ring-buffer stream goes through
|
/// Ring-buffer stream goes through
|
||||||
token: Vec<Token> ,
|
token: Vec<Token> ,
|
||||||
/// Ring-buffer of calculated sizes
|
/// Ring-buffer of calculated sizes
|
||||||
|
@@ -290,13 +290,13 @@ pub struct Printer {
     /// Begin (if there is any) on top of it. Stuff is flushed off the
     /// bottom as it becomes irrelevant due to the primary ring-buffer
     /// advancing.
-    scan_stack: Vec<uint> ,
+    scan_stack: Vec<usize> ,
     /// Top==bottom disambiguator
     scan_stack_empty: bool,
     /// Index of top of scan_stack
-    top: uint,
+    top: usize,
     /// Index of bottom of scan_stack
-    bottom: uint,
+    bottom: usize,
     /// Stack of blocks-in-progress being flushed by print
     print_stack: Vec<PrintStackElem> ,
     /// Buffered indentation to avoid writing trailing whitespace
@@ -405,7 +405,7 @@ impl Printer {
         }
         Ok(())
     }
-    pub fn scan_push(&mut self, x: uint) {
+    pub fn scan_push(&mut self, x: usize) {
         debug!("scan_push {}", x);
         if self.scan_stack_empty {
             self.scan_stack_empty = false;
@@ -416,7 +416,7 @@ impl Printer {
         }
         self.scan_stack[self.top] = x;
     }
-    pub fn scan_pop(&mut self) -> uint {
+    pub fn scan_pop(&mut self) -> usize {
         assert!((!self.scan_stack_empty));
         let x = self.scan_stack[self.top];
         if self.top == self.bottom {
@@ -426,11 +426,11 @@ impl Printer {
         }
         return x;
     }
-    pub fn scan_top(&mut self) -> uint {
+    pub fn scan_top(&mut self) -> usize {
         assert!((!self.scan_stack_empty));
         return self.scan_stack[self.top];
     }
-    pub fn scan_pop_bottom(&mut self) -> uint {
+    pub fn scan_pop_bottom(&mut self) -> usize {
         assert!((!self.scan_stack_empty));
         let x = self.scan_stack[self.bottom];
         if self.top == self.bottom {
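The scan-stack methods above only change their index type; the underlying structure is a stack stored in a fixed-size ring buffer, with `top` and `bottom` wrapping modulo the buffer length. A simplified, self-contained sketch of that idea (hypothetical names, not the libsyntax type):

// Simplified sketch of the scan_stack idea: a stack kept in a fixed-size
// ring buffer, with `top` and `bottom` as wrapping usize indices.
struct RingStack {
    buf: Vec<usize>,
    top: usize,
    bottom: usize,
    empty: bool,
}

impl RingStack {
    fn new(cap: usize) -> RingStack {
        RingStack { buf: vec![0; cap], top: 0, bottom: 0, empty: true }
    }

    fn push(&mut self, x: usize) {
        if self.empty {
            self.empty = false;
        } else {
            // advance top, wrapping around the ring
            self.top = (self.top + 1) % self.buf.len();
            assert!(self.top != self.bottom, "ring buffer overflow");
        }
        self.buf[self.top] = x;
    }

    fn pop(&mut self) -> usize {
        assert!(!self.empty);
        let x = self.buf[self.top];
        if self.top == self.bottom {
            self.empty = true;
        } else {
            // retreat top, wrapping around the ring
            self.top = (self.top + self.buf.len() - 1) % self.buf.len();
        }
        x
    }
}

fn main() {
    let mut s = RingStack::new(8);
    s.push(3);
    s.push(5);
    assert_eq!(s.pop(), 5);
    assert_eq!(s.pop(), 3);
}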
@@ -620,22 +620,22 @@ impl Printer {
 // Convenience functions to talk to the printer.
 //
 // "raw box"
-pub fn rbox(p: &mut Printer, indent: uint, b: Breaks) -> io::IoResult<()> {
+pub fn rbox(p: &mut Printer, indent: usize, b: Breaks) -> io::IoResult<()> {
     p.pretty_print(Token::Begin(BeginToken {
         offset: indent as int,
         breaks: b
     }))
 }
 
-pub fn ibox(p: &mut Printer, indent: uint) -> io::IoResult<()> {
+pub fn ibox(p: &mut Printer, indent: usize) -> io::IoResult<()> {
     rbox(p, indent, Breaks::Inconsistent)
 }
 
-pub fn cbox(p: &mut Printer, indent: uint) -> io::IoResult<()> {
+pub fn cbox(p: &mut Printer, indent: usize) -> io::IoResult<()> {
     rbox(p, indent, Breaks::Consistent)
 }
 
-pub fn break_offset(p: &mut Printer, n: uint, off: int) -> io::IoResult<()> {
+pub fn break_offset(p: &mut Printer, n: usize, off: int) -> io::IoResult<()> {
     p.pretty_print(Token::Break(BreakToken {
         offset: off,
         blank_space: n as int
@@ -662,7 +662,7 @@ pub fn zero_word(p: &mut Printer, wrd: &str) -> io::IoResult<()> {
     p.pretty_print(Token::String(/* bad */ wrd.to_string(), 0))
 }
 
-pub fn spaces(p: &mut Printer, n: uint) -> io::IoResult<()> {
+pub fn spaces(p: &mut Printer, n: usize) -> io::IoResult<()> {
     break_offset(p, n, 0)
 }
 
@@ -675,7 +675,7 @@ pub fn space(p: &mut Printer) -> io::IoResult<()> {
 }
 
 pub fn hardbreak(p: &mut Printer) -> io::IoResult<()> {
-    spaces(p, SIZE_INFINITY as uint)
+    spaces(p, SIZE_INFINITY as usize)
 }
 
 pub fn hardbreak_tok_offset(off: int) -> Token {
@@ -54,8 +54,8 @@ impl PpAnn for NoAnn {}
 
 #[derive(Copy)]
 pub struct CurrentCommentAndLiteral {
-    cur_cmnt: uint,
-    cur_lit: uint,
+    cur_cmnt: usize,
+    cur_lit: usize,
 }
 
 pub struct State<'a> {
@@ -92,10 +92,10 @@ pub fn rust_printer_annotated<'a>(writer: Box<io::Writer+'static>,
 }
 
 #[allow(non_upper_case_globals)]
-pub const indent_unit: uint = 4u;
+pub const indent_unit: usize = 4us;
 
 #[allow(non_upper_case_globals)]
-pub const default_columns: uint = 78u;
+pub const default_columns: usize = 78us;
 
 /// Requires you to pass an input filename and reader so that
 /// it can scan the input text for comments and literals to
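Note that these constants change both the type annotation and the literal suffix (`4u` becomes `4us`). The `us` suffix was the short-lived spelling accepted during this transition; current Rust writes the suffix out in full. A hedged restatement of the same declarations with today's syntax (not part of the diff):

// Equivalent declarations with today's literal suffixes; the semantics are
// assumed unchanged, only the suffix spelling differs.
#[allow(non_upper_case_globals)]
pub const indent_unit: usize = 4usize;

#[allow(non_upper_case_globals)]
pub const default_columns: usize = 78usize;

fn main() {
    assert_eq!(indent_unit * 2, 8);
    assert_eq!(default_columns - indent_unit, 74);
}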
@@ -459,7 +459,7 @@ fn needs_parentheses(expr: &ast::Expr) -> bool {
 }
 
 impl<'a> State<'a> {
-    pub fn ibox(&mut self, u: uint) -> IoResult<()> {
+    pub fn ibox(&mut self, u: usize) -> IoResult<()> {
         self.boxes.push(pp::Breaks::Inconsistent);
         pp::ibox(&mut self.s, u)
     }
@@ -469,13 +469,13 @@ impl<'a> State<'a> {
         pp::end(&mut self.s)
     }
 
-    pub fn cbox(&mut self, u: uint) -> IoResult<()> {
+    pub fn cbox(&mut self, u: usize) -> IoResult<()> {
         self.boxes.push(pp::Breaks::Consistent);
         pp::cbox(&mut self.s, u)
     }
 
     // "raw box"
-    pub fn rbox(&mut self, u: uint, b: pp::Breaks) -> IoResult<()> {
+    pub fn rbox(&mut self, u: usize, b: pp::Breaks) -> IoResult<()> {
         self.boxes.push(b);
         pp::rbox(&mut self.s, u, b)
     }
@@ -514,11 +514,11 @@ impl<'a> State<'a> {
     }
 
     pub fn bclose_(&mut self, span: codemap::Span,
-                   indented: uint) -> IoResult<()> {
+                   indented: usize) -> IoResult<()> {
         self.bclose_maybe_open(span, indented, true)
     }
     pub fn bclose_maybe_open (&mut self, span: codemap::Span,
-                              indented: uint, close_box: bool) -> IoResult<()> {
+                              indented: usize, close_box: bool) -> IoResult<()> {
         try!(self.maybe_print_comment(span.hi));
         try!(self.break_offset_if_not_bol(1u, -(indented as int)));
         try!(word(&mut self.s, "}"));
@@ -567,7 +567,7 @@ impl<'a> State<'a> {
         if !self.is_bol() { try!(space(&mut self.s)); }
         Ok(())
     }
-    pub fn break_offset_if_not_bol(&mut self, n: uint,
+    pub fn break_offset_if_not_bol(&mut self, n: usize,
                                    off: int) -> IoResult<()> {
         if !self.is_bol() {
             break_offset(&mut self.s, n, off)
@@ -1355,7 +1355,7 @@ impl<'a> State<'a> {
     }
 
     pub fn print_block_unclosed_indent(&mut self, blk: &ast::Block,
-                                       indented: uint) -> IoResult<()> {
+                                       indented: usize) -> IoResult<()> {
         self.print_block_maybe_unclosed(blk, indented, &[], false)
     }
 
@@ -1367,7 +1367,7 @@ impl<'a> State<'a> {
 
     pub fn print_block_maybe_unclosed(&mut self,
                                       blk: &ast::Block,
-                                      indented: uint,
+                                      indented: usize,
                                       attrs: &[ast::Attribute],
                                       close_box: bool) -> IoResult<()> {
         match blk.rules {
@@ -1951,7 +1951,7 @@ impl<'a> State<'a> {
         self.ann.post(self, NodeIdent(&ident))
     }
 
-    pub fn print_uint(&mut self, i: uint) -> IoResult<()> {
+    pub fn print_uint(&mut self, i: usize) -> IoResult<()> {
         word(&mut self.s, &i.to_string()[])
     }
 
@@ -3053,7 +3053,7 @@ impl<'a> State<'a> {
     }
 }
 
-fn repeat(s: &str, n: uint) -> String { iter::repeat(s).take(n).collect() }
+fn repeat(s: &str, n: usize) -> String { iter::repeat(s).take(n).collect() }
 
 #[cfg(test)]
 mod test {
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//! An "interner" is a data structure that associates values with uint tags and
+//! An "interner" is a data structure that associates values with usize tags and
 //! allows bidirectional lookup; i.e. given a value, one can easily find the
 //! type, and vice versa.
 
@@ -73,7 +73,7 @@ impl<T: Eq + Hash<Hasher> + Clone + 'static> Interner<T> {
         (*vect)[idx.uint()].clone()
     }
 
-    pub fn len(&self) -> uint {
+    pub fn len(&self) -> usize {
         let vect = self.vect.borrow();
         (*vect).len()
     }
@@ -199,7 +199,7 @@ impl StrInterner {
         (*self.vect.borrow())[idx.uint()].clone()
     }
 
-    pub fn len(&self) -> uint {
+    pub fn len(&self) -> usize {
         self.vect.borrow().len()
     }
 
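Both `Interner` and `StrInterner` now report their length as `usize`. The doc comment earlier in this file describes the structure: values get ascending integer tags and can be looked up in either direction. A minimal self-contained sketch of that idea (not the libsyntax implementation, which is generic and uses interior mutability):

// Minimal interner sketch: values are assigned ascending usize tags, and
// lookups work in both directions.
use std::collections::HashMap;

struct Interner {
    map: HashMap<String, usize>,
    vect: Vec<String>,
}

impl Interner {
    fn new() -> Interner {
        Interner { map: HashMap::new(), vect: Vec::new() }
    }

    // Return the existing tag for `val`, or assign the next free one.
    fn intern(&mut self, val: &str) -> usize {
        if let Some(&idx) = self.map.get(val) {
            return idx;
        }
        let idx = self.vect.len();
        self.map.insert(val.to_string(), idx);
        self.vect.push(val.to_string());
        idx
    }

    // Tag -> value.
    fn get(&self, idx: usize) -> &str {
        &self.vect[idx]
    }

    fn len(&self) -> usize {
        self.vect.len()
    }
}

fn main() {
    let mut i = Interner::new();
    let a = i.intern("foo");
    let b = i.intern("bar");
    assert_eq!(i.intern("foo"), a);
    assert_eq!(i.get(b), "bar");
    assert_eq!(i.len(), 2);
}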
@@ -130,10 +130,10 @@ pub fn matches_codepattern(a : &str, b : &str) -> bool {
     }
 }
 
-/// Given a string and an index, return the first uint >= idx
+/// Given a string and an index, return the first usize >= idx
 /// that is a non-ws-char or is outside of the legal range of
 /// the string.
-fn scan_for_non_ws_or_end(a : &str, idx: uint) -> uint {
+fn scan_for_non_ws_or_end(a : &str, idx: usize) -> usize {
     let mut i = idx;
     let len = a.len();
     while (i < len) && (is_whitespace(a.char_at(i))) {
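`scan_for_non_ws_or_end` keeps its behaviour and only changes its index type. For reference, a byte-indexed sketch of the same scan in modern Rust (the original uses `char_at`; this simplification assumes ASCII whitespace):

// Starting at `idx`, skip whitespace and return the first index that is not
// whitespace, or the string length if none is found.
fn scan_for_non_ws_or_end(a: &str, idx: usize) -> usize {
    let bytes = a.as_bytes();
    let mut i = idx;
    while i < bytes.len() && (bytes[i] as char).is_whitespace() {
        i += 1;
    }
    i
}

fn main() {
    assert_eq!(scan_for_non_ws_or_end("  x", 0), 2);
    assert_eq!(scan_for_non_ws_or_end("x", 0), 0);
    assert_eq!(scan_for_non_ws_or_end("   ", 0), 3);
}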
@@ -89,7 +89,7 @@ impl<T> SmallVector<T> {
         }
     }
 
-    pub fn get<'a>(&'a self, idx: uint) -> &'a T {
+    pub fn get<'a>(&'a self, idx: usize) -> &'a T {
         match self.repr {
             One(ref v) if idx == 0 => v,
             Many(ref vs) => &vs[idx],
@@ -126,7 +126,7 @@ impl<T> SmallVector<T> {
         IntoIter { repr: repr }
     }
 
-    pub fn len(&self) -> uint {
+    pub fn len(&self) -> usize {
         match self.repr {
             Zero => 0,
             One(..) => 1,
@@ -165,7 +165,7 @@ impl<T> Iterator for IntoIter<T> {
         }
     }
 
-    fn size_hint(&self) -> (uint, Option<uint>) {
+    fn size_hint(&self) -> (usize, Option<usize>) {
         match self.repr {
             ZeroIterator => (0, Some(0)),
             OneIterator(..) => (1, Some(1)),
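`size_hint` now returns the `(usize, Option<usize>)` pair that the `Iterator` trait expects. A tiny self-contained example of an iterator over at most one item reporting exact bounds (illustrative, not the SmallVector iterator):

// An iterator over zero or one items; size_hint reports exact bounds.
struct MaybeIter<T> {
    item: Option<T>,
}

impl<T> Iterator for MaybeIter<T> {
    type Item = T;

    fn next(&mut self) -> Option<T> {
        self.item.take()
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        match self.item {
            Some(_) => (1, Some(1)),
            None => (0, Some(0)),
        }
    }
}

fn main() {
    let it = MaybeIter { item: Some(42) };
    assert_eq!(it.size_hint(), (1, Some(1)));
    assert_eq!(it.collect::<Vec<_>>(), vec![42]);
}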