
Auto merge of #21677 - japaric:no-range, r=alexcrichton

Note: Do not merge until we get a newer snapshot that includes #21374

There was some type-inference fallout (see the 4th commit) because type inference with `a..b` is not as good as it is with `range(a, b)` (see #21672).
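For readers unfamiliar with the change, here is a minimal sketch (in present-day Rust, with illustrative names not taken from the diff) of the rewrite this PR performs and of the inference fallout described above: `range(a, b)` calls become `a..b` range literals, and where the old function's argument types used to pin down the element type, the new syntax sometimes needed an explicit suffix such as `0u32`.

    // Old style, removed by this PR:
    //     let v: Vec<u32> = range(0, 100).map(|i| i * 2).collect();
    //
    // New style: `0..100` builds a `Range` value directly. In the 2015-era
    // compiler the start bound often needed an explicit suffix (`0u32` here),
    // which is why suffixes like `0u`, `0i`, and `20u` appear throughout the
    // diff below; today's compiler usually infers this without help.
    fn doubled() -> Vec<u32> {
        (0u32..100).map(|i| i * 2).collect()
    }

    fn main() {
        assert_eq!(doubled().len(), 100);
        assert_eq!(doubled()[3], 6);
    }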

r? @alexcrichton
bors 2015-01-29 16:28:52 +00:00
commit 265a23320d
366 changed files with 1314 additions and 1337 deletions
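The other mechanical rewrite that recurs below is the rename of the `Show` formatting trait to `Debug`, so `#[derive(Show)]` becomes `#[derive(Debug)]` while the `{:?}` format specifier stays the same. A minimal sketch on a made-up type (`Point` is illustrative, not from the diff):

    #[derive(Debug, PartialEq)]
    struct Point { x: i32, y: i32 }

    fn main() {
        let p = Point { x: 1, y: 2 };
        // `{:?}` routes through the derived `Debug` impl.
        assert_eq!(format!("{:?}", p), "Point { x: 1, y: 2 }");
    }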


@@ -12,20 +12,6 @@ pub use self::Mode::*;
 use std::fmt;
 use std::str::FromStr;
-#[cfg(stage0)] // NOTE: remove impl after snapshot
-#[derive(Clone, Copy, PartialEq, Show)]
-pub enum Mode {
-CompileFail,
-RunFail,
-RunPass,
-RunPassValgrind,
-Pretty,
-DebugInfoGdb,
-DebugInfoLldb,
-Codegen
-}
-#[cfg(not(stage0))] // NOTE: remove cfg after snapshot
 #[derive(Clone, Copy, PartialEq, Debug)]
 pub enum Mode {
 CompileFail,


@@ -23,6 +23,7 @@
 #![feature(os)]
 #![feature(unicode)]
+#![allow(unstable)]
 #![deny(warnings)]
 extern crate test;


@@ -17,7 +17,7 @@ pub struct ExpectedError {
 pub msg: String,
 }
-#[derive(PartialEq, Show)]
+#[derive(PartialEq, Debug)]
 enum WhichLine { ThisLine, FollowPrevious(uint), AdjustBackward(uint) }
 /// Looks for either "//~| KIND MESSAGE" or "//~^^... KIND MESSAGE"


@@ -147,10 +147,10 @@ for all but the most trivial of situations.
 Here's an example of using `Result`:
 ```rust
-#[derive(Show)]
+#[derive(Debug)]
 enum Version { Version1, Version2 }
-#[derive(Show)]
+#[derive(Debug)]
 enum ParseError { InvalidHeaderLength, InvalidVersion }
 fn parse_version(header: &[u8]) -> Result<Version, ParseError> {


@@ -605,7 +605,7 @@ Sometimes, you need a recursive data structure. The simplest is known as a
 ```{rust}
-#[derive(Show)]
+#[derive(Debug)]
 enum List<T> {
 Cons(T, Box<List<T>>),
 Nil,


@@ -37,7 +37,7 @@
 //!
 //! let five = Arc::new(5i);
 //!
-//! for _ in range(0u, 10) {
+//! for _ in 0u..10 {
 //! let five = five.clone();
 //!
 //! Thread::spawn(move || {
@@ -54,7 +54,7 @@
 //!
 //! let five = Arc::new(Mutex::new(5i));
 //!
-//! for _ in range(0u, 10) {
+//! for _ in 0u..10 {
 //! let five = five.clone();
 //!
 //! Thread::spawn(move || {
@@ -95,10 +95,10 @@ use heap::deallocate;
 /// use std::thread::Thread;
 ///
 /// fn main() {
-/// let numbers: Vec<_> = range(0, 100u32).map(|i| i as f32).collect();
+/// let numbers: Vec<_> = (0..100u32).map(|i| i as f32).collect();
 /// let shared_numbers = Arc::new(numbers);
 ///
-/// for _ in range(0u, 10) {
+/// for _ in 0u..10 {
 /// let child_numbers = shared_numbers.clone();
 ///
 /// Thread::spawn(move || {
@@ -814,6 +814,6 @@ mod tests {
 }
 // Make sure deriving works with Arc<T>
-#[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Show, Default)]
+#[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Default)]
 struct Foo { inner: Arc<int> }
 }


@@ -29,7 +29,7 @@
 //! Creating a recursive data structure:
 //!
 //! ```
-//! #[derive(Show)]
+//! #[derive(Debug)]
 //! enum List<T> {
 //! Cons(T, Box<List<T>>),
 //! Nil,
@@ -250,8 +250,6 @@ impl<T: ?Sized> DerefMut for Box<T> {
 fn deref_mut(&mut self) -> &mut T { &mut **self }
 }
-// FIXME(#21363) remove `old_impl_check` when bug is fixed
-#[old_impl_check]
 impl<'a, T> Iterator for Box<Iterator<Item=T> + 'a> {
 type Item = T;


@@ -70,8 +70,6 @@
 #![feature(lang_items, unsafe_destructor)]
 #![feature(box_syntax)]
 #![feature(optin_builtin_traits)]
-// FIXME(#21363) remove `old_impl_check` when bug is fixed
-#![feature(old_impl_check)]
 #![allow(unknown_features)] #![feature(int_uint)]
 #![feature(core)]
 #![feature(hash)]


@@ -311,7 +311,7 @@ impl Arena {
 #[test]
 fn test_arena_destructors() {
 let arena = Arena::new();
-for i in range(0u, 10) {
+for i in 0u..10 {
 // Arena allocate something with drop glue to make sure it
 // doesn't leak.
 arena.alloc(|| Rc::new(i));
@@ -340,7 +340,7 @@ fn test_arena_alloc_nested() {
 fn test_arena_destructors_fail() {
 let arena = Arena::new();
 // Put some stuff in the arena.
-for i in range(0u, 10) {
+for i in 0u..10 {
 // Arena allocate something with drop glue to make sure it
 // doesn't leak.
 arena.alloc(|| { Rc::new(i) });
@@ -410,7 +410,7 @@ impl<T> TypedArenaChunk<T> {
 // Destroy all the allocated objects.
 if intrinsics::needs_drop::<T>() {
 let mut start = self.start();
-for _ in range(0, len) {
+for _ in 0..len {
 ptr::read(start as *const T); // run the destructor on the pointer
 start = start.offset(mem::size_of::<T>() as int)
 }
@@ -530,7 +530,7 @@ mod tests {
 #[test]
 pub fn test_copy() {
 let arena = TypedArena::new();
-for _ in range(0u, 100000) {
+for _ in 0u..100000 {
 arena.alloc(Point {
 x: 1,
 y: 2,
@@ -585,7 +585,7 @@ mod tests {
 #[test]
 pub fn test_noncopy() {
 let arena = TypedArena::new();
-for _ in range(0u, 100000) {
+for _ in 0u..100000 {
 arena.alloc(Noncopy {
 string: "hello world".to_string(),
 array: vec!( 1, 2, 3, 4, 5 ),


@@ -24,7 +24,7 @@ pub fn insert_rand_n<M, I, R>(n: uint,
 // setup
 let mut rng = rand::weak_rng();
-for _ in range(0, n) {
+for _ in 0..n {
 insert(map, rng.gen::<uint>() % n);
 }
@@ -46,7 +46,7 @@ pub fn insert_seq_n<M, I, R>(n: uint,
 R: FnMut(&mut M, uint),
 {
 // setup
-for i in range(0u, n) {
+for i in 0u..n {
 insert(map, i * 2);
 }
@@ -70,7 +70,7 @@ pub fn find_rand_n<M, T, I, F>(n: uint,
 {
 // setup
 let mut rng = rand::weak_rng();
-let mut keys = range(0, n).map(|_| rng.gen::<uint>() % n)
+let mut keys = (0..n).map(|_| rng.gen::<uint>() % n)
 .collect::<Vec<_>>();
 for k in keys.iter() {
@@ -97,7 +97,7 @@ pub fn find_seq_n<M, T, I, F>(n: uint,
 F: FnMut(&M, uint) -> T,
 {
 // setup
-for i in range(0u, n) {
+for i in 0u..n {
 insert(map, i);
 }


@@ -67,7 +67,7 @@
 //! // for a simpler implementation.
 //! fn shortest_path(adj_list: &Vec<Vec<Edge>>, start: uint, goal: uint) -> uint {
 //! // dist[node] = current shortest distance from `start` to `node`
-//! let mut dist: Vec<_> = range(0, adj_list.len()).map(|_| uint::MAX).collect();
+//! let mut dist: Vec<_> = (0..adj_list.len()).map(|_| uint::MAX).collect();
 //!
 //! let mut heap = BinaryHeap::new();
 //!


@@ -66,9 +66,9 @@
 //! };
 //!
 //! // Simple primality tests below our max bound
-//! let print_primes = 20;
+//! let print_primes = 20u;
 //! print!("The primes below {} are: ", print_primes);
-//! for x in range(0, print_primes) {
+//! for x in 0..print_primes {
 //! if primes.contains(&x) {
 //! print!("{} ", x);
 //! }
@@ -104,7 +104,7 @@ type MatchWords<'a> = Chain<Enumerate<Blocks<'a>>, Skip<Take<Enumerate<Repeat<u3
 fn reverse_bits(byte: u8) -> u8 {
 let mut result = 0;
-for i in range(0, u8::BITS) {
+for i in 0..u8::BITS {
 result |= ((byte >> i) & 1) << (u8::BITS - 1 - i);
 }
 result
@@ -320,7 +320,7 @@ impl Bitv {
 bitv.nbits = len;
-for i in range(0, complete_words) {
+for i in 0..complete_words {
 bitv.storage.push(
 ((reverse_bits(bytes[i * 4 + 0]) as u32) << 0) |
 ((reverse_bits(bytes[i * 4 + 1]) as u32) << 8) |
@@ -353,7 +353,7 @@ impl Bitv {
 /// ```
 pub fn from_fn<F>(len: uint, mut f: F) -> Bitv where F: FnMut(uint) -> bool {
 let mut bitv = Bitv::from_elem(len, false);
-for i in range(0u, len) {
+for i in 0u..len {
 bitv.set(i, f(i));
 }
 bitv
@@ -660,7 +660,7 @@ impl Bitv {
 let len = self.nbits/8 +
 if self.nbits % 8 == 0 { 0 } else { 1 };
-range(0, len).map(|i|
+(0..len).map(|i|
 bit(self, i, 0) |
 bit(self, i, 1) |
 bit(self, i, 2) |
@@ -830,7 +830,7 @@ impl Bitv {
 // Fill in words after the old tail word
 let stop_idx = cmp::min(self.storage.len(), new_nblocks);
-for idx in range(old_last_word + 1, stop_idx) {
+for idx in old_last_word + 1..stop_idx {
 self.storage[idx] = full_value;
 }
@@ -2232,12 +2232,12 @@ mod tests {
 #[test]
 fn test_equal_sneaky_big() {
 let mut a = Bitv::from_elem(100, false);
-for i in range(0u, 100) {
+for i in 0u..100 {
 a.set(i, true);
 }
 let mut b = Bitv::from_elem(100, true);
-for i in range(0u, 100) {
+for i in 0u..100 {
 b.set(i, true);
 }
@@ -2283,7 +2283,7 @@ mod tests {
 assert_eq!(bitv.iter().collect::<Vec<bool>>(), bools);
-let long = range(0, 10000).map(|i| i % 2 == 0).collect::<Vec<_>>();
+let long = (0i32..10000).map(|i| i % 2 == 0).collect::<Vec<_>>();
 let bitv: Bitv = long.iter().map(|n| *n).collect();
 assert_eq!(bitv.iter().collect::<Vec<bool>>(), long)
 }
@@ -2526,7 +2526,7 @@ mod bitv_bench {
 let mut r = rng();
 let mut bitv = 0 as uint;
 b.iter(|| {
-for _ in range(0u, 100) {
+for _ in 0u..100 {
 bitv |= 1 << ((r.next_u32() as uint) % u32::BITS);
 }
 black_box(&bitv);
@@ -2538,7 +2538,7 @@ mod bitv_bench {
 let mut r = rng();
 let mut bitv = Bitv::from_elem(BENCH_BITS, false);
 b.iter(|| {
-for _ in range(0u, 100) {
+for _ in 0u..100 {
 bitv.set((r.next_u32() as uint) % BENCH_BITS, true);
 }
 black_box(&bitv);
@@ -2550,7 +2550,7 @@ mod bitv_bench {
 let mut r = rng();
 let mut bitv = Bitv::from_elem(BENCH_BITS, false);
 b.iter(|| {
-for _ in range(0u, 100) {
+for _ in 0u..100 {
 bitv.set((r.next_u32() as uint) % BENCH_BITS, r.gen());
 }
 black_box(&bitv);
@@ -2562,7 +2562,7 @@ mod bitv_bench {
 let mut r = rng();
 let mut bitv = Bitv::from_elem(u32::BITS, false);
 b.iter(|| {
-for _ in range(0u, 100) {
+for _ in 0u..100 {
 bitv.set((r.next_u32() as uint) % u32::BITS, true);
 }
 black_box(&bitv);
@@ -2583,7 +2583,7 @@ mod bitv_bench {
 let bitv = Bitv::from_elem(u32::BITS, false);
 b.iter(|| {
 let mut sum = 0u;
-for _ in range(0u, 10) {
+for _ in 0u..10 {
 for pres in bitv.iter() {
 sum += pres as uint;
 }
@@ -2647,7 +2647,7 @@ mod bitv_set_test {
 let idxs: Vec<uint> = bitv.iter().collect();
 assert_eq!(idxs, vec![0, 2, 3]);
-let long: BitvSet = range(0u, 10000).filter(|&n| n % 2 == 0).collect();
+let long: BitvSet = (0u..10000).filter(|&n| n % 2 == 0).collect();
 let real = range_step(0, 10000, 2).collect::<Vec<uint>>();
 let idxs: Vec<uint> = long.iter().collect();
@@ -3021,7 +3021,7 @@ mod bitv_set_bench {
 let mut r = rng();
 let mut bitv = BitvSet::new();
 b.iter(|| {
-for _ in range(0u, 100) {
+for _ in 0u..100 {
 bitv.insert((r.next_u32() as uint) % u32::BITS);
 }
 black_box(&bitv);
@@ -3033,7 +3033,7 @@ mod bitv_set_bench {
 let mut r = rng();
 let mut bitv = BitvSet::new();
 b.iter(|| {
-for _ in range(0u, 100) {
+for _ in 0u..100 {
 bitv.insert((r.next_u32() as uint) % BENCH_BITS);
 }
 black_box(&bitv);


@@ -1601,39 +1601,39 @@ mod test {
 let size = 10000u;
 assert_eq!(map.len(), 0);
-for i in range(0, size) {
+for i in 0..size {
 assert_eq!(map.insert(i, 10*i), None);
 assert_eq!(map.len(), i + 1);
 }
-for i in range(0, size) {
+for i in 0..size {
 assert_eq!(map.get(&i).unwrap(), &(i*10));
 }
-for i in range(size, size*2) {
+for i in size..size*2 {
 assert_eq!(map.get(&i), None);
 }
-for i in range(0, size) {
+for i in 0..size {
 assert_eq!(map.insert(i, 100*i), Some(10*i));
 assert_eq!(map.len(), size);
 }
-for i in range(0, size) {
+for i in 0..size {
 assert_eq!(map.get(&i).unwrap(), &(i*100));
 }
-for i in range(0, size/2) {
+for i in 0..size/2 {
 assert_eq!(map.remove(&(i*2)), Some(i*200));
 assert_eq!(map.len(), size - i - 1);
 }
-for i in range(0, size/2) {
+for i in 0..size/2 {
 assert_eq!(map.get(&(2*i)), None);
 assert_eq!(map.get(&(2*i+1)).unwrap(), &(i*200 + 100));
 }
-for i in range(0, size/2) {
+for i in 0..size/2 {
 assert_eq!(map.remove(&(2*i)), None);
 assert_eq!(map.remove(&(2*i+1)), Some(i*200 + 100));
 assert_eq!(map.len(), size/2 - i - 1);
@@ -1661,10 +1661,10 @@ mod test {
 let size = 10000u;
 // Forwards
-let mut map: BTreeMap<uint, uint> = range(0, size).map(|i| (i, i)).collect();
+let mut map: BTreeMap<uint, uint> = (0..size).map(|i| (i, i)).collect();
 fn test<T>(size: uint, mut iter: T) where T: Iterator<Item=(uint, uint)> {
-for i in range(0, size) {
+for i in 0..size {
 assert_eq!(iter.size_hint(), (size - i, Some(size - i)));
 assert_eq!(iter.next().unwrap(), (i, i));
 }
@@ -1681,10 +1681,10 @@ mod test {
 let size = 10000u;
 // Forwards
-let mut map: BTreeMap<uint, uint> = range(0, size).map(|i| (i, i)).collect();
+let mut map: BTreeMap<uint, uint> = (0..size).map(|i| (i, i)).collect();
 fn test<T>(size: uint, mut iter: T) where T: Iterator<Item=(uint, uint)> {
-for i in range(0, size) {
+for i in 0..size {
 assert_eq!(iter.size_hint(), (size - i, Some(size - i)));
 assert_eq!(iter.next().unwrap(), (size - i - 1, size - i - 1));
 }
@@ -1701,16 +1701,16 @@ mod test {
 let size = 10000u;
 // Forwards
-let mut map: BTreeMap<uint, uint> = range(0, size).map(|i| (i, i)).collect();
+let mut map: BTreeMap<uint, uint> = (0..size).map(|i| (i, i)).collect();
 fn test<T>(size: uint, mut iter: T)
 where T: Iterator<Item=(uint, uint)> + DoubleEndedIterator {
-for i in range(0, size / 4) {
+for i in 0..size / 4 {
 assert_eq!(iter.size_hint(), (size - i * 2, Some(size - i * 2)));
 assert_eq!(iter.next().unwrap(), (i, i));
 assert_eq!(iter.next_back().unwrap(), (size - i - 1, size - i - 1));
 }
-for i in range(size / 4, size * 3 / 4) {
+for i in size / 4..size * 3 / 4 {
 assert_eq!(iter.size_hint(), (size * 3 / 4 - i, Some(size * 3 / 4 - i)));
 assert_eq!(iter.next().unwrap(), (i, i));
 }
@@ -1727,10 +1727,10 @@ mod test {
 let size = 5u;
 // Forwards
-let map: BTreeMap<uint, uint> = range(0, size).map(|i| (i, i)).collect();
+let map: BTreeMap<uint, uint> = (0..size).map(|i| (i, i)).collect();
 let mut j = 0u;
-for ((&k, &v), i) in map.range(Included(&2), Unbounded).zip(range(2u, size)) {
+for ((&k, &v), i) in map.range(Included(&2), Unbounded).zip(2u..size) {
 assert_eq!(k, i);
 assert_eq!(v, i);
 j += 1;
@@ -1741,11 +1741,11 @@ mod test {
 #[test]
 fn test_range_1000() {
 let size = 1000u;
-let map: BTreeMap<uint, uint> = range(0, size).map(|i| (i, i)).collect();
+let map: BTreeMap<uint, uint> = (0..size).map(|i| (i, i)).collect();
 fn test(map: &BTreeMap<uint, uint>, size: uint, min: Bound<&uint>, max: Bound<&uint>) {
 let mut kvs = map.range(min, max).map(|(&k, &v)| (k, v));
-let mut pairs = range(0, size).map(|i| (i, i));
+let mut pairs = (0..size).map(|i| (i, i));
 for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) {
 assert_eq!(kv, pair);
@@ -1764,10 +1764,10 @@ mod test {
 #[test]
 fn test_range() {
 let size = 200u;
-let map: BTreeMap<uint, uint> = range(0, size).map(|i| (i, i)).collect();
+let map: BTreeMap<uint, uint> = (0..size).map(|i| (i, i)).collect();
-for i in range(0, size) {
+for i in 0..size {
-for j in range(i, size) {
+for j in i..size {
 let mut kvs = map.range(Included(&i), Included(&j)).map(|(&k, &v)| (k, v));
 let mut pairs = range_inclusive(i, j).map(|i| (i, i));
@@ -1917,7 +1917,7 @@ mod bench {
 let mut map = BTreeMap::<uint, uint>::new();
 let mut rng = weak_rng();
-for _ in range(0, size) {
+for _ in 0..size {
 map.insert(rng.gen(), rng.gen());
 }


@@ -501,7 +501,7 @@ impl<K: Clone, V: Clone> Clone for Node<K, V> {
 /// let mut small_node = Node::make_leaf_root(3);
 /// let mut large_node = Node::make_leaf_root(100);
 ///
-/// for i in range(0, 100) {
+/// for i in 0..100 {
 /// // Insert to the end
 /// large_node.edge_handle(i).insert_as_leaf(i, i);
 /// }


@@ -592,14 +592,14 @@ impl<T> DList<T> {
 // instead of skipping using .skip() (which creates a new struct),
 // we skip manually so we can access the head field without
 // depending on implementation details of Skip
-for _ in range(0, at - 1) {
+for _ in 0..at - 1 {
 iter.next();
 }
 iter.head
 } else {
 // better off starting from the end
 let mut iter = self.iter_mut();
-for _ in range(0, len - 1 - (at - 1)) {
+for _ in 0..len - 1 - (at - 1) {
 iter.next_back();
 }
 iter.tail
@@ -1070,10 +1070,10 @@ mod tests {
 let mut n = m.split_off(2);
 assert_eq!(m.len(), 2);
 assert_eq!(n.len(), 3);
-for elt in range(1i, 3) {
+for elt in 1i..3 {
 assert_eq!(m.pop_front(), Some(elt));
 }
-for elt in range(3i, 6) {
+for elt in 3i..6 {
 assert_eq!(n.pop_front(), Some(elt));
 }
 }
@@ -1084,10 +1084,10 @@ mod tests {
 let mut n = m.split_off(4);
 assert_eq!(m.len(), 4);
 assert_eq!(n.len(), 1);
-for elt in range(1i, 5) {
+for elt in 1i..5 {
 assert_eq!(m.pop_front(), Some(elt));
 }
-for elt in range(5i, 6) {
+for elt in 5i..6 {
 assert_eq!(n.pop_front(), Some(elt));
 }
 }
@@ -1325,7 +1325,7 @@ mod tests {
 #[test]
 fn test_fuzz() {
-for _ in range(0u, 25) {
+for _ in 0u..25 {
 fuzz_test(3);
 fuzz_test(16);
 fuzz_test(189);
@@ -1334,7 +1334,7 @@ mod tests {
 #[test]
 fn test_show() {
-let list: DList<int> = range(0i, 10).collect();
+let list: DList<int> = (0i..10).collect();
 assert_eq!(format!("{:?}", list), "DList [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]");
 let list: DList<&str> = vec!["just", "one", "test", "more"].iter()
@@ -1347,7 +1347,7 @@ mod tests {
 fn fuzz_test(sz: int) {
 let mut m: DList<int> = DList::new();
 let mut v = vec![];
-for i in range(0, sz) {
+for i in 0..sz {
 check_links(&m);
 let r: u8 = rand::random();
 match r % 6 {


@@ -272,7 +272,7 @@ mod test {
 use super::{EnumSet, CLike};
-#[derive(Copy, PartialEq, Show)]
+#[derive(Copy, PartialEq, Debug)]
 #[repr(uint)]
 enum Foo {
 A, B, C


@@ -388,7 +388,7 @@ impl<T> RingBuf<T> {
 /// use std::collections::RingBuf;
 ///
 /// let mut buf = RingBuf::with_capacity(15);
-/// buf.extend(range(0u, 4));
+/// buf.extend(0u..4);
 /// assert_eq!(buf.capacity(), 15);
 /// buf.shrink_to_fit();
 /// assert!(buf.capacity() >= 4);
@@ -483,7 +483,7 @@ impl<T> RingBuf<T> {
 #[unstable(feature = "collections",
 reason = "matches collection reform specification; waiting on panic semantics")]
 pub fn truncate(&mut self, len: uint) {
-for _ in range(len, self.len()) {
+for _ in len..self.len() {
 self.pop_back();
 }
 }
@@ -1719,21 +1719,21 @@ mod tests {
 #[test]
 fn test_push_front_grow() {
 let mut deq = RingBuf::new();
-for i in range(0u, 66) {
+for i in 0u..66 {
 deq.push_front(i);
 }
 assert_eq!(deq.len(), 66);
-for i in range(0u, 66) {
+for i in 0u..66 {
 assert_eq!(deq[i], 65 - i);
 }
 let mut deq = RingBuf::new();
-for i in range(0u, 66) {
+for i in 0u..66 {
 deq.push_back(i);
 }
-for i in range(0u, 66) {
+for i in 0u..66 {
 assert_eq!(deq[i], i);
 }
 }
@@ -1741,7 +1741,7 @@ mod tests {
 #[test]
 fn test_index() {
 let mut deq = RingBuf::new();
-for i in range(1u, 4) {
+for i in 1u..4 {
 deq.push_front(i);
 }
 assert_eq!(deq[1], 2);
@@ -1751,7 +1751,7 @@ mod tests {
 #[should_fail]
 fn test_index_out_of_bounds() {
 let mut deq = RingBuf::new();
-for i in range(1u, 4) {
+for i in 1u..4 {
 deq.push_front(i);
 }
 deq[3];
@@ -1769,7 +1769,7 @@ mod tests {
 fn bench_push_back_100(b: &mut test::Bencher) {
 let mut deq = RingBuf::with_capacity(101);
 b.iter(|| {
-for i in range(0i, 100) {
+for i in 0i..100 {
 deq.push_back(i);
 }
 deq.head = 0;
@@ -1781,7 +1781,7 @@ mod tests {
 fn bench_push_front_100(b: &mut test::Bencher) {
 let mut deq = RingBuf::with_capacity(101);
 b.iter(|| {
-for i in range(0i, 100) {
+for i in 0i..100 {
 deq.push_front(i);
 }
 deq.head = 0;
@@ -1819,7 +1819,7 @@ mod tests {
 fn bench_grow_1025(b: &mut test::Bencher) {
 b.iter(|| {
 let mut deq = RingBuf::new();
-for i in range(0i, 1025) {
+for i in 0i..1025 {
 deq.push_front(i);
 }
 test::black_box(deq);
@@ -1828,7 +1828,7 @@ mod tests {
 #[bench]
 fn bench_iter_1000(b: &mut test::Bencher) {
-let ring: RingBuf<int> = range(0i, 1000).collect();
+let ring: RingBuf<int> = (0i..1000).collect();
 b.iter(|| {
 let mut sum = 0;
@@ -1841,7 +1841,7 @@ mod tests {
 #[bench]
 fn bench_mut_iter_1000(b: &mut test::Bencher) {
-let mut ring: RingBuf<int> = range(0i, 1000).collect();
+let mut ring: RingBuf<int> = (0i..1000).collect();
 b.iter(|| {
 let mut sum = 0;
@@ -1852,21 +1852,21 @@ mod tests {
 })
 }
-#[derive(Clone, PartialEq, Show)]
+#[derive(Clone, PartialEq, Debug)]
 enum Taggy {
 One(int),
 Two(int, int),
 Three(int, int, int),
 }
-#[derive(Clone, PartialEq, Show)]
+#[derive(Clone, PartialEq, Debug)]
 enum Taggypar<T> {
 Onepar(int),
 Twopar(int, int),
 Threepar(int, int, int),
 }
-#[derive(Clone, PartialEq, Show)]
+#[derive(Clone, PartialEq, Debug)]
 struct RecCy {
 x: int,
 y: int,
@@ -1977,7 +1977,7 @@ mod tests {
 #[test]
 fn test_swap() {
-let mut d: RingBuf<int> = range(0i, 5).collect();
+let mut d: RingBuf<int> = (0i..5).collect();
 d.pop_front();
 d.swap(0, 3);
 assert_eq!(d.iter().map(|&x|x).collect::<Vec<int>>(), vec!(4, 2, 3, 1));
@@ -1989,7 +1989,7 @@ mod tests {
 assert_eq!(d.iter().next(), None);
 assert_eq!(d.iter().size_hint(), (0, Some(0)));
-for i in range(0i, 5) {
+for i in 0i..5 {
 d.push_back(i);
 }
 {
@@ -1997,7 +1997,7 @@ mod tests {
 assert_eq!(d.iter().collect::<Vec<&int>>(), b);
 }
-for i in range(6i, 9) {
+for i in 6i..9 {
 d.push_front(i);
 }
 {
@@ -2020,7 +2020,7 @@ mod tests {
 let mut d = RingBuf::new();
 assert_eq!(d.iter().rev().next(), None);
-for i in range(0i, 5) {
+for i in 0i..5 {
 d.push_back(i);
 }
 {
@@ -2028,7 +2028,7 @@ mod tests {
 assert_eq!(d.iter().rev().collect::<Vec<&int>>(), b);
 }
-for i in range(6i, 9) {
+for i in 6i..9 {
 d.push_front(i);
 }
 let b: &[_] = &[&4,&3,&2,&1,&0,&6,&7,&8];
@@ -2055,7 +2055,7 @@ mod tests {
 let mut d = RingBuf::new();
 assert!(d.iter_mut().next().is_none());
-for i in range(0u, 3) {
+for i in 0u..3 {
 d.push_front(i);
 }
@@ -2078,7 +2078,7 @@ mod tests {
 let mut d = RingBuf::new();
 assert!(d.iter_mut().rev().next().is_none());
-for i in range(0u, 3) {
+for i in 0u..3 {
 d.push_front(i);
 }
@@ -2112,7 +2112,7 @@ mod tests {
 // simple iter
 {
 let mut d = RingBuf::new();
-for i in range(0i, 5) {
+for i in 0i..5 {
 d.push_back(i);
 }
@@ -2123,10 +2123,10 @@ mod tests {
 // wrapped iter
 {
 let mut d = RingBuf::new();
-for i in range(0i, 5) {
+for i in 0i..5 {
 d.push_back(i);
 }
-for i in range(6, 9) {
+for i in 6i..9 {
 d.push_front(i);
 }
@@ -2137,10 +2137,10 @@ mod tests {
 // partially used
 {
 let mut d = RingBuf::new();
-for i in range(0i, 5) {
+for i in 0i..5 {
 d.push_back(i);
 }
-for i in range(6, 9) {
+for i in 6i..9 {
 d.push_front(i);
 }
@@ -2176,7 +2176,7 @@ mod tests {
 // simple iter
 {
 let mut d = RingBuf::new();
-for i in range(0i, 5) {
+for i in 0i..5 {
 d.push_back(i);
 }
@@ -2187,10 +2187,10 @@ mod tests {
 // wrapped iter
 {
 let mut d = RingBuf::new();
-for i in range(0i, 5) {
+for i in 0i..5 {
 d.push_back(i);
 }
-for i in range(6, 9) {
+for i in 6i..9 {
 d.push_front(i);
 }
@@ -2201,10 +2201,10 @@ mod tests {
 // partially used
 {
 let mut d = RingBuf::new();
-for i in range(0i, 5) {
+for i in 0i..5 {
 d.push_back(i);
 }
-for i in range(6, 9) {
+for i in 6i..9 {
 d.push_front(i);
 }
@@ -2309,7 +2309,7 @@ mod tests {
 #[test]
 fn test_show() {
-let ringbuf: RingBuf<int> = range(0i, 10).collect();
+let ringbuf: RingBuf<int> = (0i..10).collect();
 assert_eq!(format!("{:?}", ringbuf), "RingBuf [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]");
 let ringbuf: RingBuf<&str> = vec!["just", "one", "test", "more"].iter()
@@ -2389,41 +2389,41 @@ mod tests {
 // test growth path A
 // [T o o H] -> [T o o H . . . . ]
 let mut ring = RingBuf::with_capacity(4);
-for i in range(0i, 3) {
+for i in 0i..3 {
 ring.push_back(i);
 }
 ring.reserve(7);
-for i in range(0i, 3) {
+for i in 0i..3 {
 assert_eq!(ring.pop_front(), Some(i));
 }
 // test growth path B
 // [H T o o] -> [. T o o H . . . ]
 let mut ring = RingBuf::with_capacity(4);
-for i in range(0i, 1) {
+for i in 0i..1 {
 ring.push_back(i);
 assert_eq!(ring.pop_front(), Some(i));
 }
-for i in range(0i, 3) {
+for i in 0i..3 {
 ring.push_back(i);
 }
 ring.reserve(7);
-for i in range(0i, 3) {
+for i in 0i..3 {
 assert_eq!(ring.pop_front(), Some(i));
 }
 // test growth path C
 // [o o H T] -> [o o H . . . . T ]
 let mut ring = RingBuf::with_capacity(4);
-for i in range(0i, 3) {
+for i in 0i..3 {
 ring.push_back(i);
 assert_eq!(ring.pop_front(), Some(i));
 }
-for i in range(0i, 3) {
+for i in 0i..3 {
 ring.push_back(i);
 }
 ring.reserve(7);
-for i in range(0i, 3) {
+for i in 0i..3 {
 assert_eq!(ring.pop_front(), Some(i));
 }
 }
@@ -2463,7 +2463,7 @@ mod tests {
 #[test]
 fn test_get_mut() {
 let mut ring = RingBuf::new();
-for i in range(0i, 3) {
+for i in 0i..3 {
 ring.push_back(i);
 }
@@ -2492,27 +2492,27 @@ mod tests {
 let usable_cap = tester.capacity();
 let final_len = usable_cap / 2;
-for len in range(0, final_len) {
+for len in 0..final_len {
 let expected = if back {
-range(0, len).collect()
+(0..len).collect()
 } else {
-range(0, len).rev().collect()
+(0..len).rev().collect()
 };
-for tail_pos in range(0, usable_cap) {
+for tail_pos in 0..usable_cap {
 tester.tail = tail_pos;
 tester.head = tail_pos;
 if back {
-for i in range(0, len * 2) {
+for i in 0..len * 2 {
 tester.push_front(i);
 }
-for i in range(0, len) {
+for i in 0..len {
 assert_eq!(tester.swap_back_remove(i), Some(len * 2 - 1 - i));
 }
 } else {
-for i in range(0, len * 2) {
+for i in 0..len * 2 {
 tester.push_back(i);
 }
-for i in range(0, len) {
+for i in 0..len {
 let idx = tester.len() - 1 - i;
 assert_eq!(tester.swap_front_remove(idx), Some(len * 2 - 1 - i));
 }
@@ -2540,14 +2540,14 @@ mod tests {
 // len is the length *after* insertion
-for len in range(1, cap) {
+for len in 1..cap {
 // 0, 1, 2, .., len - 1
 let expected = iter::count(0, 1).take(len).collect();
-for tail_pos in range(0, cap) {
+for tail_pos in 0..cap {
-for to_insert in range(0, len) {
+for to_insert in 0..len {
 tester.tail = tail_pos;
 tester.head = tail_pos;
-for i in range(0, len) {
+for i in 0..len {
 if i != to_insert {
 tester.push_back(i);
 }
@@ -2573,14 +2573,14 @@ mod tests {
 let cap = tester.capacity();
 // len is the length *after* removal
-for len in range(0, cap - 1) {
+for len in 0..cap - 1 {
 // 0, 1, 2, .., len - 1
 let expected = iter::count(0, 1).take(len).collect();
-for tail_pos in range(0, cap) {
+for tail_pos in 0..cap {
-for to_remove in range(0, len + 1) {
+for to_remove in 0..len + 1 {
 tester.tail = tail_pos;
 tester.head = tail_pos;
-for i in range(0, len) {
+for i in 0..len {
 if i == to_remove {
 tester.push_back(1234);
 }
@@ -2611,14 +2611,14 @@ mod tests {
 tester.reserve(63);
 let max_cap = tester.capacity();
-for len in range(0, cap + 1) {
+for len in 0..cap + 1 {
 // 0, 1, 2, .., len - 1
 let expected = iter::count(0, 1).take(len).collect();
-for tail_pos in range(0, max_cap + 1) {
+for tail_pos in 0..max_cap + 1 {
 tester.tail = tail_pos;
 tester.head = tail_pos;
 tester.reserve(63);
-for i in range(0, len) {
+for i in 0..len {
 tester.push_back(i);
 }
 tester.shrink_to_fit();
@@ -2648,20 +2648,20 @@ mod tests {
 let cap = ring.capacity() as int;
 let first = cap/2;
 let last = cap - first;
-for i in range(0, first) {
+for i in 0..first {
 ring.push_back(i);
 let (left, right) = ring.as_slices();
-let expected: Vec<_> = range(0, i+1).collect();
+let expected: Vec<_> = (0..i+1).collect();
 assert_eq!(left, expected);
 assert_eq!(right, []);
 }
-for j in range(-last, 0) {
+for j in -last..0 {
 ring.push_front(j);
 let (left, right) = ring.as_slices();
-let expected_left: Vec<_> = range(-last, j+1).rev().collect();
+let expected_left: Vec<_> = (-last..j+1).rev().collect();
-let expected_right: Vec<_> = range(0, first).collect();
+let expected_right: Vec<_> = (0..first).collect();
 assert_eq!(left, expected_left);
 assert_eq!(right, expected_right);
 }
@@ -2676,20 +2676,20 @@ mod tests {
 let cap = ring.capacity() as int;
 let first = cap/2;
 let last = cap - first;
-for i in range(0, first) {
+for i in 0..first {
 ring.push_back(i);
 let (left, right) = ring.as_mut_slices();
-let expected: Vec<_> = range(0, i+1).collect();
+let expected: Vec<_> = (0..i+1).collect();
 assert_eq!(left, expected);
 assert_eq!(right, []);
 }
-for j in range(-last, 0) {
+for j in -last..0 {
 ring.push_front(j);
 let (left, right) = ring.as_mut_slices();
-let expected_left: Vec<_> = range(-last, j+1).rev().collect();
+let expected_left: Vec<_> = (-last..j+1).rev().collect();
-let expected_right: Vec<_> = range(0, first).collect();
+let expected_right: Vec<_> = (0..first).collect();
 assert_eq!(left, expected_left);
 assert_eq!(right, expected_right);
 }


@@ -94,7 +94,7 @@ use core::clone::Clone;
 use core::cmp::Ordering::{self, Greater, Less};
 use core::cmp::{self, Ord, PartialEq};
 use core::iter::{Iterator, IteratorExt};
-use core::iter::{range, range_step, MultiplicativeIterator};
+use core::iter::{range_step, MultiplicativeIterator};
 use core::marker::Sized;
 use core::mem::size_of;
 use core::mem;
@@ -1165,7 +1165,7 @@ impl ElementSwaps {
 // element (equal to the original index).
 ElementSwaps{
 emit_reset: true,
-sdir: range(0, length).map(|i| SizeDirection{ size: i, dir: Neg }).collect(),
+sdir: (0..length).map(|i| SizeDirection{ size: i, dir: Neg }).collect(),
 swaps_made: 0
 }
 }
@@ -1254,7 +1254,7 @@ impl Iterator for ElementSwaps {
 #[inline]
 fn size_hint(&self) -> (uint, Option<uint>) {
 // For a vector of size n, there are exactly n! permutations.
-let n = range(2, self.sdir.len() + 1).product();
+let n = (2..self.sdir.len() + 1).product();
 (n - self.swaps_made, Some(n - self.swaps_made))
 }
 }
@@ -1305,7 +1305,7 @@ fn insertion_sort<T, F>(v: &mut [T], mut compare: F) where F: FnMut(&T, &T) -> O
 let buf_v = v.as_mut_ptr();
 // 1 <= i < len;
-for i in range(1, len) {
+for i in 1..len {
 // j satisfies: 0 <= j <= i;
 let mut j = i;
 unsafe {
@@ -1385,7 +1385,7 @@ fn merge_sort<T, F>(v: &mut [T], mut compare: F) where F: FnMut(&T, &T) -> Order
 // .offset-ing.
 for start in range_step(0, len, insertion) {
 // start <= i < len;
-for i in range(start, cmp::min(start + insertion, len)) {
+for i in start..cmp::min(start + insertion, len) {
 // j satisfies: start <= j <= i;
 let mut j = i as int;
 unsafe {
@@ -1526,7 +1526,7 @@ mod tests {
 #[test]
 fn test_from_fn() {
 // Test on-stack from_fn.
-let mut v = range(0, 3).map(square).collect::<Vec<_>>();
+let mut v = (0u..3).map(square).collect::<Vec<_>>();
 {
 let v = v.as_slice();
 assert_eq!(v.len(), 3u);
@@ -1536,7 +1536,7 @@ mod tests {
 }
 // Test on-heap from_fn.
-v = range(0, 5).map(square).collect::<Vec<_>>();
+v = (0u..5).map(square).collect::<Vec<_>>();
 {
 let v = v.as_slice();
 assert_eq!(v.len(), 5u);
@@ -2097,8 +2097,8 @@ mod tests {
 #[test]
 fn test_sort() {
-for len in range(4u, 25) {
+for len in 4u..25 {
-for _ in range(0i, 100) {
+for _ in 0i..100 {
 let mut v = thread_rng().gen_iter::<uint>().take(len)
 .collect::<Vec<uint>>();
 let mut v1 = v.clone();
@@ -2125,8 +2125,8 @@ mod tests {
 #[test]
 fn test_sort_stability() {
-for len in range(4i, 25) {
+for len in 4i..25 {
-for _ in range(0u, 10) {
+for _ in 0u..10 {
 let mut counts = [0i; 10];
 // create a vector like [(6, 1), (5, 1), (6, 2), ...],
@@ -2134,7 +2134,7 @@ mod tests {
 // the second item represents which occurrence of that
 // number this element is, i.e. the second elements
 // will occur in sorted order.
-let mut v = range(0, len).map(|_| {
+let mut v = (0..len).map(|_| {
 let n = thread_rng().gen::<uint>() % 10;
 counts[n] += 1;
 (n, counts[n])
@@ -2717,13 +2717,13 @@ mod tests {
 #[test]
 fn test_shrink_to_fit() {
 let mut xs = vec![0, 1, 2, 3];
-for i in range(4i, 100) {
+for i in 4i..100 {
 xs.push(i)
 }
 assert_eq!(xs.capacity(), 128);
 xs.shrink_to_fit();
 assert_eq!(xs.capacity(), 100);
-assert_eq!(xs, range(0i, 100i).collect::<Vec<_>>());
+assert_eq!(xs, (0i..100i).collect::<Vec<_>>());
 }
 #[test]
@@ -2854,7 +2854,7 @@ mod bench {
 fn iterator(b: &mut Bencher) {
 // peculiar numbers to stop LLVM from optimising the summation
 // out.
-let v = range(0u, 100).map(|i| i ^ (i << 1) ^ (i >> 1)).collect::<Vec<_>>();
+let v = (0u..100).map(|i| i ^ (i << 1) ^ (i >> 1)).collect::<Vec<_>>();
 b.iter(|| {
 let mut sum = 0;
@@ -2882,7 +2882,7 @@ mod bench {
 #[bench]
 fn concat(b: &mut Bencher) {
 let xss: Vec<Vec<uint>> =
-range(0, 100u).map(|i| range(0, i).collect()).collect();
+(0..100u).map(|i| (0..i).collect()).collect();
 b.iter(|| {
 xss.concat();
 });
@@ -2891,7 +2891,7 @@ mod bench {
 #[bench]
 fn connect(b: &mut Bencher) {
 let xss: Vec<Vec<uint>> =
-range(0, 100u).map(|i| range(0, i).collect()).collect();
+(0..100u).map(|i| (0..i).collect()).collect();
 b.iter(|| {
 xss.connect(&0)
 });
@@ -2908,7 +2908,7 @@ mod bench {
 #[bench]
 fn starts_with_same_vector(b: &mut Bencher) {
-let vec: Vec<uint> = range(0, 100).collect();
+let vec: Vec<uint> = (0u..100).collect();
 b.iter(|| {
 vec.starts_with(vec.as_slice())
 })
@@ -2924,8 +2924,8 @@ mod bench {
 #[bench]
 fn starts_with_diff_one_element_at_end(b: &mut Bencher) {
-let vec: Vec<uint> = range(0, 100).collect();
+let vec: Vec<uint> = (0u..100).collect();
-let mut match_vec: Vec<uint> = range(0, 99).collect();
+let mut match_vec: Vec<uint> = (0u..99).collect();
 match_vec.push(0);
 b.iter(|| {
 vec.starts_with(match_vec.as_slice())
@@ -2934,7 +2934,7 @@ mod bench {
 #[bench]
 fn ends_with_same_vector(b: &mut Bencher) {
-let vec: Vec<uint> = range(0, 100).collect();
+let vec: Vec<uint> = (0u..100).collect();
 b.iter(|| {
 vec.ends_with(vec.as_slice())
 })
@@ -2950,8 +2950,8 @@ mod bench {
 #[bench]
 fn ends_with_diff_one_element_at_beginning(b: &mut Bencher) {
-let vec: Vec<uint> = range(0, 100).collect();
+let vec: Vec<uint> = (0u..100).collect();
-let mut match_vec: Vec<uint> = range(0, 100).collect();
+let mut match_vec: Vec<uint> = (0u..100).collect();
 match_vec.as_mut_slice()[0] = 200;
 b.iter(|| {
 vec.starts_with(match_vec.as_slice())
@@ -2960,7 +2960,7 @@ mod bench {
 #[bench]
 fn contains_last_element(b: &mut Bencher) {
-let vec: Vec<uint> = range(0, 100).collect();
+let vec: Vec<uint> = (0u..100).collect();
 b.iter(|| {
 vec.contains(&99u)
 })
@@ -2993,7 +2993,7 @@ mod bench {
 unsafe {
 v.set_len(1024);
 }
-for i in range(0u, 1024) {
+for i in 0u..1024 {
 v[i] = 0;
 }
 });
@@ -3018,7 +3018,7 @@ mod bench {
 let mut rng = weak_rng();
 b.iter(|| {
 let mut v = repeat((0u, 0u)).take(30).collect::<Vec<_>>();
-for _ in range(0u, 100) {
+for _ in 0u..100 {
 let l = v.len();
 v.insert(rng.gen::<uint>() % (l + 1),
 (1, 1));
@@ -3030,7 +3030,7 @@ mod bench {
 let mut rng = weak_rng();
 b.iter(|| {
 let mut v = repeat((0u, 0u)).take(130).collect::<Vec<_>>();
-for _ in range(0u, 100) {
+for _ in 0u..100 {
 let l = v.len();
 v.remove(rng.gen::<uint>() % l);
 }
@@ -3069,7 +3069,7 @@ mod bench {
 #[bench]
 fn sort_sorted(b: &mut Bencher) {
-let mut v = range(0u, 10000).collect::<Vec<_>>();
+let mut v = (0u..10000).collect::<Vec<_>>();
 b.iter(|| {
 v.sort();
 });
@@ -3113,7 +3113,7 @@ mod bench {
 #[bench]
 fn sort_big_sorted(b: &mut Bencher) {
-let mut v = range(0, 10000u).map(|i| (i, i, i, i)).collect::<Vec<_>>();
+let mut v = (0..10000u).map(|i| (i, i, i, i)).collect::<Vec<_>>();
 b.iter(|| {
 v.sort();
 });


@@ -59,7 +59,7 @@ use core::borrow::{BorrowFrom, ToOwned};
 use core::char::CharExt;
 use core::clone::Clone;
 use core::iter::AdditiveIterator;
-use core::iter::{range, Iterator, IteratorExt};
+use core::iter::{Iterator, IteratorExt};
 use core::ops::{FullRange, Index};
 use core::option::Option::{self, Some, None};
 use core::slice::AsSlice;
@@ -142,9 +142,9 @@ Section: Iterators
 // Helper functions used for Unicode normalization
 fn canonical_sort(comb: &mut [(char, u8)]) {
 let len = comb.len();
-for i in range(0, len) {
+for i in 0..len {
 let mut swapped = false;
-for j in range(1, len-i) {
+for j in 1..len-i {
 let class_a = comb[j-1].1;
 let class_b = comb[j].1;
 if class_a != 0 && class_b != 0 && class_a > class_b {
@@ -2122,7 +2122,7 @@ mod tests {
 #[test]
 fn test_chars_decoding() {
 let mut bytes = [0u8; 4];
-for c in range(0u32, 0x110000).filter_map(|c| ::core::char::from_u32(c)) {
+for c in (0u32..0x110000).filter_map(|c| ::core::char::from_u32(c)) {
 let len = c.encode_utf8(&mut bytes).unwrap_or(0);
 let s = ::core::str::from_utf8(&bytes[..len]).unwrap();
 if Some(c) != s.chars().next() {
@@ -2134,7 +2134,7 @@ mod tests {
 #[test]
 fn test_chars_rev_decoding() {
 let mut bytes = [0u8; 4];
-for c in range(0u32, 0x110000).filter_map(|c| ::core::char::from_u32(c)) {
+for c in (0u32..0x110000).filter_map(|c| ::core::char::from_u32(c)) {
 let len = c.encode_utf8(&mut bytes).unwrap_or(0);
 let s = ::core::str::from_utf8(&bytes[..len]).unwrap();
 if Some(c) != s.chars().rev().next() {


@@ -41,7 +41,7 @@ pub struct String {
 /// A possible error value from the `String::from_utf8` function.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[derive(Show)]
+#[derive(Debug)]
 pub struct FromUtf8Error {
 bytes: Vec<u8>,
 error: Utf8Error,
@@ -50,7 +50,7 @@ pub struct FromUtf8Error {
 /// A possible error value from the `String::from_utf16` function.
 #[stable(feature = "rust1", since = "1.0.0")]
 #[allow(missing_copy_implementations)]
-#[derive(Show)]
+#[derive(Debug)]
 pub struct FromUtf16Error(());
 impl String {
@@ -1354,7 +1354,7 @@ mod tests {
 b.bytes = REPETITIONS;
 b.iter(|| {
 let mut r = String::new();
-for _ in range(0, REPETITIONS) {
+for _ in 0..REPETITIONS {
 r.push_str("a")
 }
 });
@@ -1365,7 +1365,7 @@ mod tests {
 b.bytes = REPETITIONS;
 b.iter(|| {
 let mut r = String::new();
-for _ in range(0, REPETITIONS) {
+for _ in 0..REPETITIONS {
 r.push('a')
 }
 });
@@ -1376,7 +1376,7 @@ mod tests {
 b.bytes = REPETITIONS * 2;
 b.iter(|| {
 let mut r = String::new();
-for _ in range(0, REPETITIONS) {
+for _ in 0..REPETITIONS {
 r.push('â')
 }
 });


@@ -186,7 +186,7 @@ impl<T> Vec<T> {
 /// assert_eq!(vec.len(), 0);
 ///
 /// // These are all done without reallocating...
-/// for i in range(0i, 10) {
+/// for i in 0i..10 {
 /// vec.push(i);
 /// }
 ///
@@ -233,7 +233,7 @@ impl<T> Vec<T> {
 /// mem::forget(v);
 ///
 /// // Overwrite memory with 4, 5, 6
-/// for i in range(0, len as int) {
+/// for i in 0..len as int {
 /// ptr::write(p.offset(i), 4 + i);
 /// }
 ///
@@ -605,7 +605,7 @@ impl<T> Vec<T> {
 {
 let v = self.as_mut_slice();
-for i in range(0u, len) {
+for i in 0u..len {
 if !f(&v[i]) {
 del += 1;
 } else if del > 0 {
@@ -811,7 +811,7 @@ impl<T> Vec<T> {
 /// let w = v.map_in_place(|i| i + 3);
 /// assert_eq!(w.as_slice(), [3, 4, 5].as_slice());
 ///
-/// #[derive(PartialEq, Show)]
+/// #[derive(PartialEq, Debug)]
 /// struct Newtype(u8);
 /// let bytes = vec![0x11, 0x22];
 /// let newtyped_bytes = bytes.map_in_place(|x| Newtype(x));
@@ -1079,7 +1079,7 @@ impl<T: Clone> Vec<T> {
 pub fn push_all(&mut self, other: &[T]) {
 self.reserve(other.len());
-for i in range(0, other.len()) {
+for i in 0..other.len() {
 let len = self.len();
 // Unsafe code so this can be optimised to a memcpy (or something similarly
@@ -1969,7 +1969,7 @@ mod tests {
 v.reserve(2);
 assert!(v.capacity() >= 2);
-for i in range(0i, 16) {
+for i in 0i..16 {
 v.push(i);
 }
@@ -1988,13 +1988,13 @@ mod tests {
 let mut v = Vec::new();
 let mut w = Vec::new();
-v.extend(range(0i, 3));
+v.extend(0i..3);
-for i in range(0i, 3) { w.push(i) }
+for i in 0i..3 { w.push(i) }
 assert_eq!(v, w);
-v.extend(range(3i, 10));
+v.extend(3i..10);
-for i in range(3i, 10) { w.push(i) }
+for i in 3i..10 { w.push(i) }
 assert_eq!(v, w);
 }
@@ -2279,7 +2279,7 @@ mod tests {
 #[test]
 fn test_map_in_place_zero_sized() {
 let v = vec![(), ()];
-#[derive(PartialEq, Show)]
+#[derive(PartialEq, Debug)]
 struct ZeroSized;
 assert_eq!(v.map_in_place(|_| ZeroSized), [ZeroSized, ZeroSized]);
 }
@@ -2288,11 +2288,11 @@ mod tests {
 fn test_map_in_place_zero_drop_count() {
 use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
-#[derive(Clone, PartialEq, Show)]
+#[derive(Clone, PartialEq, Debug)]
 struct Nothing;
 impl Drop for Nothing { fn drop(&mut self) { } }
-#[derive(Clone, PartialEq, Show)]
+#[derive(Clone, PartialEq, Debug)]
 struct ZeroSized;
 impl Drop for ZeroSized {
 fn drop(&mut self) {
@@ -2442,7 +2442,7 @@ mod tests {
 b.bytes = src_len as u64;
 b.iter(|| {
-let dst = range(0, src_len).collect::<Vec<_>>();
+let dst = (0..src_len).collect::<Vec<_>>();
 assert_eq!(dst.len(), src_len);
 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
 })
@@ -2499,7 +2499,7 @@ mod tests {
 }
 fn do_bench_from_slice(b: &mut Bencher, src_len: uint) {
-let src: Vec<uint> = FromIterator::from_iter(range(0, src_len));
+let src: Vec<uint> = FromIterator::from_iter(0..src_len);
 b.bytes = src_len as u64;
@@ -2531,7 +2531,7 @@ mod tests {
 }
 fn do_bench_from_iter(b: &mut Bencher, src_len: uint) {
-let src: Vec<uint> = FromIterator::from_iter(range(0, src_len));
+let src: Vec<uint> = FromIterator::from_iter(0..src_len);
 b.bytes = src_len as u64;
@@ -2563,8 +2563,8 @@ mod tests {
 }
 fn do_bench_extend(b: &mut Bencher, dst_len: uint, src_len: uint) {
-let dst: Vec<uint> = FromIterator::from_iter(range(0, dst_len));
+let dst: Vec<uint> = FromIterator::from_iter(0..dst_len);
-let src: Vec<uint> = FromIterator::from_iter(range(dst_len, dst_len + src_len));
+let src: Vec<uint> = FromIterator::from_iter(dst_len..dst_len + src_len);
 b.bytes = src_len as u64;
@@ -2612,8 +2612,8 @@ mod tests {
 }
 fn do_bench_push_all(b: &mut Bencher, dst_len: uint, src_len: uint) {
-let dst: Vec<uint> = FromIterator::from_iter(range(0, dst_len));
+let dst: Vec<uint> = FromIterator::from_iter(0..dst_len);
-let src: Vec<uint> = FromIterator::from_iter(range(dst_len, dst_len + src_len));
+let src: Vec<uint> = FromIterator::from_iter(dst_len..dst_len + src_len);
 b.bytes = src_len as u64;
@@ -2661,8 +2661,8 @@ mod tests {
 }
 fn do_bench_push_all_move(b: &mut Bencher, dst_len: uint, src_len: uint) {
-let dst: Vec<uint> = FromIterator::from_iter(range(0u, dst_len));
+let dst: Vec<uint> = FromIterator::from_iter(0u..dst_len);
-let src: Vec<uint> = FromIterator::from_iter(range(dst_len, dst_len + src_len));
+let src: Vec<uint> = FromIterator::from_iter(dst_len..dst_len + src_len);
 b.bytes = src_len as u64;
@@ -2710,7 +2710,7 @@ mod tests {
 }
 fn do_bench_clone(b: &mut Bencher, src_len: uint) {
-let src: Vec<uint> = FromIterator::from_iter(range(0, src_len));
+let src: Vec<uint> = FromIterator::from_iter(0..src_len);
 b.bytes = src_len as u64;
@@ -2742,15 +2742,15 @@ mod tests {
 }
 fn do_bench_clone_from(b: &mut Bencher, times: uint, dst_len: uint, src_len: uint) {
let dst: Vec<uint> = FromIterator::from_iter(range(0, src_len)); let dst: Vec<uint> = FromIterator::from_iter(0..src_len);
let src: Vec<uint> = FromIterator::from_iter(range(dst_len, dst_len + src_len)); let src: Vec<uint> = FromIterator::from_iter(dst_len..dst_len + src_len);
b.bytes = (times * src_len) as u64; b.bytes = (times * src_len) as u64;
b.iter(|| { b.iter(|| {
let mut dst = dst.clone(); let mut dst = dst.clone();
for _ in range(0, times) { for _ in 0..times {
dst.clone_from(&src); dst.clone_from(&src);
assert_eq!(dst.len(), src_len); assert_eq!(dst.len(), src_len);
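
Across the `Vec` docs, methods, and benches above, every `range(a, b)` call becomes the literal `a..b`, which is itself an `Iterator`. A minimal sketch of the equivalent idioms (values are illustrative, not taken from the diff):

```rust
fn main() {
    // A range literal is an `Iterator`, so it can be collected directly...
    let v: Vec<i32> = (0..5).collect();
    assert_eq!(v, vec![0, 1, 2, 3, 4]);

    // ...passed to `extend`...
    let mut w = Vec::new();
    w.extend(0..3);

    // ...or looped over, exactly as before.
    for i in 3..5 {
        w.push(i);
    }
    assert_eq!(w, vec![0, 1, 2, 3, 4]);
}
```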

View file

@ -459,7 +459,7 @@ impl<V> VecMap<V> {
pub fn insert(&mut self, key: uint, value: V) -> Option<V> { pub fn insert(&mut self, key: uint, value: V) -> Option<V> {
let len = self.v.len(); let len = self.v.len();
if len <= key { if len <= key {
self.v.extend(range(0, key - len + 1).map(|_| None)); self.v.extend((0..key - len + 1).map(|_| None));
} }
replace(&mut self.v[key], Some(value)) replace(&mut self.v[key], Some(value))
} }

View file

@ -166,8 +166,7 @@ impl Any {
/// ///
/// A `TypeId` is currently only available for types which ascribe to `'static`, /// A `TypeId` is currently only available for types which ascribe to `'static`,
/// but this limitation may be removed in the future. /// but this limitation may be removed in the future.
#[cfg_attr(stage0, lang = "type_id")] #[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
#[derive(Clone, Copy, PartialEq, Eq, Show, Hash)]
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub struct TypeId { pub struct TypeId {
t: u64, t: u64,

View file

@ -125,7 +125,7 @@ impl<T> ToOwned<T> for T where T: Clone {
/// use std::borrow::Cow; /// use std::borrow::Cow;
/// ///
/// fn abs_all(input: &mut Cow<Vec<int>, [int]>) { /// fn abs_all(input: &mut Cow<Vec<int>, [int]>) {
/// for i in range(0, input.len()) { /// for i in 0..input.len() {
/// let v = input[i]; /// let v = input[i];
/// if v < 0 { /// if v < 0 {
/// // clones into a vector the first time (if not already owned) /// // clones into a vector the first time (if not already owned)

View file

@ -105,7 +105,7 @@ pub trait Eq: PartialEq<Self> {
} }
/// An ordering is, e.g, a result of a comparison between two values. /// An ordering is, e.g, a result of a comparison between two values.
#[derive(Clone, Copy, PartialEq, Show)] #[derive(Clone, Copy, PartialEq, Debug)]
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub enum Ordering { pub enum Ordering {
/// An ordering where a compared value is less [than another]. /// An ordering where a compared value is less [than another].

View file

@ -17,7 +17,7 @@ pub use self::SignFormat::*;
use char; use char;
use char::CharExt; use char::CharExt;
use fmt; use fmt;
use iter::{IteratorExt, range}; use iter::IteratorExt;
use num::{cast, Float, ToPrimitive}; use num::{cast, Float, ToPrimitive};
use num::FpCategory as Fp; use num::FpCategory as Fp;
use ops::FnOnce; use ops::FnOnce;
@ -242,7 +242,7 @@ pub fn float_to_str_bytes_common<T: Float, U, F>(
if i < 0 if i < 0
|| buf[i as uint] == b'-' || buf[i as uint] == b'-'
|| buf[i as uint] == b'+' { || buf[i as uint] == b'+' {
for j in range(i as uint + 1, end).rev() { for j in (i as uint + 1..end).rev() {
buf[j + 1] = buf[j]; buf[j + 1] = buf[j];
} }
buf[(i + 1) as uint] = value2ascii(1); buf[(i + 1) as uint] = value2ascii(1);

View file

@ -16,7 +16,7 @@
use any; use any;
use cell::{Cell, RefCell, Ref, RefMut}; use cell::{Cell, RefCell, Ref, RefMut};
use char::CharExt; use char::CharExt;
use iter::{Iterator, IteratorExt, range}; use iter::{Iterator, IteratorExt};
use marker::{Copy, Sized}; use marker::{Copy, Sized};
use mem; use mem;
use option::Option; use option::Option;
@ -32,9 +32,6 @@ pub use self::num::radix;
pub use self::num::Radix; pub use self::num::Radix;
pub use self::num::RadixFmt; pub use self::num::RadixFmt;
#[cfg(stage0)] pub use self::Debug as Show;
#[cfg(stage0)] pub use self::Display as String;
mod num; mod num;
mod float; mod float;
pub mod rt; pub mod rt;
@ -51,7 +48,7 @@ pub type Result = result::Result<(), Error>;
/// some other means. /// some other means.
#[unstable(feature = "core", #[unstable(feature = "core",
reason = "core and I/O reconciliation may alter this definition")] reason = "core and I/O reconciliation may alter this definition")]
#[derive(Copy, Show)] #[derive(Copy, Debug)]
pub struct Error; pub struct Error;
/// A collection of methods that are required to format a message into a stream. /// A collection of methods that are required to format a message into a stream.
@ -243,7 +240,6 @@ impl<'a> Display for Arguments<'a> {
#[unstable(feature = "core", #[unstable(feature = "core",
reason = "I/O and core have yet to be reconciled")] reason = "I/O and core have yet to be reconciled")]
#[deprecated(since = "1.0.0", reason = "renamed to Debug")] #[deprecated(since = "1.0.0", reason = "renamed to Debug")]
#[cfg(not(stage0))]
pub trait Show { pub trait Show {
/// Formats the value using the given formatter. /// Formats the value using the given formatter.
fn fmt(&self, &mut Formatter) -> Result; fn fmt(&self, &mut Formatter) -> Result;
@ -261,7 +257,6 @@ pub trait Debug {
fn fmt(&self, &mut Formatter) -> Result; fn fmt(&self, &mut Formatter) -> Result;
} }
#[cfg(not(stage0))]
impl<T: Show + ?Sized> Debug for T { impl<T: Show + ?Sized> Debug for T {
#[allow(deprecated)] #[allow(deprecated)]
fn fmt(&self, f: &mut Formatter) -> Result { Show::fmt(self, f) } fn fmt(&self, f: &mut Formatter) -> Result { Show::fmt(self, f) }
@ -271,7 +266,6 @@ impl<T: Show + ?Sized> Debug for T {
/// used. It corresponds to the default format, `{}`. /// used. It corresponds to the default format, `{}`.
#[unstable(feature = "core")] #[unstable(feature = "core")]
#[deprecated(since = "1.0.0", reason = "renamed to Display")] #[deprecated(since = "1.0.0", reason = "renamed to Display")]
#[cfg(not(stage0))]
pub trait String { pub trait String {
/// Formats the value using the given formatter. /// Formats the value using the given formatter.
fn fmt(&self, &mut Formatter) -> Result; fn fmt(&self, &mut Formatter) -> Result;
@ -288,7 +282,6 @@ pub trait Display {
fn fmt(&self, &mut Formatter) -> Result; fn fmt(&self, &mut Formatter) -> Result;
} }
#[cfg(not(stage0))]
impl<T: String + ?Sized> Display for T { impl<T: String + ?Sized> Display for T {
#[allow(deprecated)] #[allow(deprecated)]
fn fmt(&self, f: &mut Formatter) -> Result { String::fmt(self, f) } fn fmt(&self, f: &mut Formatter) -> Result { String::fmt(self, f) }
@ -596,13 +589,13 @@ impl<'a> Formatter<'a> {
let len = self.fill.encode_utf8(&mut fill).unwrap_or(0); let len = self.fill.encode_utf8(&mut fill).unwrap_or(0);
let fill = unsafe { str::from_utf8_unchecked(&fill[..len]) }; let fill = unsafe { str::from_utf8_unchecked(&fill[..len]) };
for _ in range(0, pre_pad) { for _ in 0..pre_pad {
try!(self.buf.write_str(fill)); try!(self.buf.write_str(fill));
} }
try!(f(self)); try!(f(self));
for _ in range(0, post_pad) { for _ in 0..post_pad {
try!(self.buf.write_str(fill)); try!(self.buf.write_str(fill));
} }
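
The `core::fmt` hunks above drop the remaining stage0 shims and leave `Show` and `String` as deprecated traits whose blanket impls forward to `Debug` and `Display`. From the user side the rename looks roughly like this sketch (the `Point` type is illustrative, not from the diff):

```rust
use std::fmt;

// Deriving `Debug` replaces the old `#[derive(Show)]`.
#[derive(Debug)]
struct Point { x: i32, y: i32 }

// `Display` is still written by hand for user-facing output.
impl fmt::Display for Point {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "({}, {})", self.x, self.y)
    }
}

fn main() {
    let p = Point { x: 1, y: 2 };
    println!("{:?}", p); // Debug:   Point { x: 1, y: 2 }
    println!("{}", p);   // Display: (1, 2)
}
```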

View file

@ -197,12 +197,8 @@ extern "rust-intrinsic" {
pub fn pref_align_of<T>() -> uint; pub fn pref_align_of<T>() -> uint;
/// Get a static pointer to a type descriptor. /// Get a static pointer to a type descriptor.
#[cfg(not(stage0))]
pub fn get_tydesc<T: ?Sized>() -> *const TyDesc; pub fn get_tydesc<T: ?Sized>() -> *const TyDesc;
#[cfg(stage0)]
pub fn get_tydesc<T>() -> *const TyDesc;
/// Gets an identifier which is globally unique to the specified type. This /// Gets an identifier which is globally unique to the specified type. This
/// function will return the same value for a type regardless of whichever /// function will return the same value for a type regardless of whichever
/// crate it is invoked in. /// crate it is invoked in.

View file

@ -101,8 +101,6 @@ pub trait Iterator {
fn size_hint(&self) -> (usize, Option<usize>) { (0, None) } fn size_hint(&self) -> (usize, Option<usize>) { (0, None) }
} }
// FIXME(#21363) remove `old_impl_check` when bug is fixed
#[old_impl_check]
impl<'a, T> Iterator for &'a mut (Iterator<Item=T> + 'a) { impl<'a, T> Iterator for &'a mut (Iterator<Item=T> + 'a) {
type Item = T; type Item = T;
@ -717,7 +715,7 @@ pub trait IteratorExt: Iterator + Sized {
Self: ExactSizeIterator + DoubleEndedIterator Self: ExactSizeIterator + DoubleEndedIterator
{ {
let len = self.len(); let len = self.len();
for i in range(0, len).rev() { for i in (0..len).rev() {
if predicate(self.next_back().expect("rposition: incorrect ExactSizeIterator")) { if predicate(self.next_back().expect("rposition: incorrect ExactSizeIterator")) {
return Some(i); return Some(i);
} }
@ -1226,7 +1224,7 @@ impl_multiplicative! { f32, 1.0 }
impl_multiplicative! { f64, 1.0 } impl_multiplicative! { f64, 1.0 }
/// `MinMaxResult` is an enum returned by `min_max`. See `IteratorOrdExt::min_max` for more detail. /// `MinMaxResult` is an enum returned by `min_max`. See `IteratorOrdExt::min_max` for more detail.
#[derive(Clone, PartialEq, Show)] #[derive(Clone, PartialEq, Debug)]
#[unstable(feature = "core", #[unstable(feature = "core",
reason = "unclear whether such a fine-grained result is widely useful")] reason = "unclear whether such a fine-grained result is widely useful")]
pub enum MinMaxResult<T> { pub enum MinMaxResult<T> {
@ -1509,9 +1507,9 @@ impl<T, U, A, B> DoubleEndedIterator for Zip<A, B> where
if a_sz != b_sz { if a_sz != b_sz {
// Adjust a, b to equal length // Adjust a, b to equal length
if a_sz > b_sz { if a_sz > b_sz {
for _ in range(0, a_sz - b_sz) { self.a.next_back(); } for _ in 0..a_sz - b_sz { self.a.next_back(); }
} else { } else {
for _ in range(0, b_sz - a_sz) { self.b.next_back(); } for _ in 0..b_sz - a_sz { self.b.next_back(); }
} }
} }
match (self.a.next_back(), self.b.next_back()) { match (self.a.next_back(), self.b.next_back()) {
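
A recurring detail in `rposition` and the `Zip` back-end above: because `..` binds more loosely than a method call, a range must be parenthesized before adaptors such as `.rev()` are applied. A small standalone illustration (values are hypothetical):

```rust
fn main() {
    let len = 5;
    // Writing `0..len.rev()` would parse as `0..(len.rev())`, so the range
    // is wrapped in parentheses before calling iterator adaptors.
    let backwards: Vec<usize> = (0..len).rev().collect();
    assert_eq!(backwards, vec![4, 3, 2, 1, 0]);

    let doubled: Vec<usize> = (0..len).map(|i| i * 2).collect();
    assert_eq!(doubled, vec![0, 2, 4, 6, 8]);
}
```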

View file

@ -64,8 +64,6 @@
#![feature(unboxed_closures)] #![feature(unboxed_closures)]
#![allow(unknown_features)] #![feature(int_uint)] #![allow(unknown_features)] #![feature(int_uint)]
#![feature(on_unimplemented)] #![feature(on_unimplemented)]
// FIXME(#21363) remove `old_impl_check` when bug is fixed
#![feature(old_impl_check)]
#![deny(missing_docs)] #![deny(missing_docs)]
#[macro_use] #[macro_use]

View file

@ -50,7 +50,7 @@ pub trait Sized {
/// words: /// words:
/// ///
/// ``` /// ```
/// #[derive(Show)] /// #[derive(Debug)]
/// struct Foo; /// struct Foo;
/// ///
/// let x = Foo; /// let x = Foo;
@ -66,7 +66,7 @@ pub trait Sized {
/// ///
/// ``` /// ```
/// // we can just derive a `Copy` implementation /// // we can just derive a `Copy` implementation
/// #[derive(Show, Copy)] /// #[derive(Debug, Copy)]
/// struct Foo; /// struct Foo;
/// ///
/// let x = Foo; /// let x = Foo;

View file

@ -31,7 +31,7 @@ unsafe impl Zeroable for u64 {}
/// A wrapper type for raw pointers and integers that will never be /// A wrapper type for raw pointers and integers that will never be
/// NULL or 0 that might allow certain optimizations. /// NULL or 0 that might allow certain optimizations.
#[lang="non_zero"] #[lang="non_zero"]
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Show, Hash)] #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug, Hash)]
#[unstable(feature = "core")] #[unstable(feature = "core")]
pub struct NonZero<T: Zeroable>(T); pub struct NonZero<T: Zeroable>(T);

View file

@ -1241,7 +1241,7 @@ impl_num_cast! { f32, to_f32 }
impl_num_cast! { f64, to_f64 } impl_num_cast! { f64, to_f64 }
/// Used for representing the classification of floating point numbers /// Used for representing the classification of floating point numbers
#[derive(Copy, PartialEq, Show)] #[derive(Copy, PartialEq, Debug)]
#[unstable(feature = "core", reason = "may be renamed")] #[unstable(feature = "core", reason = "may be renamed")]
pub enum FpCategory { pub enum FpCategory {
/// "Not a Number", often obtained by dividing by zero /// "Not a Number", often obtained by dividing by zero

View file

@ -35,7 +35,7 @@
//! ```rust //! ```rust
//! use std::ops::{Add, Sub}; //! use std::ops::{Add, Sub};
//! //!
//! #[derive(Show)] //! #[derive(Debug)]
//! struct Point { //! struct Point {
//! x: int, //! x: int,
//! y: int //! y: int

View file

@ -163,7 +163,7 @@ use slice;
// which basically means it must be `Option`. // which basically means it must be `Option`.
/// The `Option` type. /// The `Option` type.
#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Show, Hash)] #[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)]
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub enum Option<T> { pub enum Option<T> {
/// No value /// No value

View file

@ -30,7 +30,7 @@
//! defined and used like so: //! defined and used like so:
//! //!
//! ``` //! ```
//! #[derive(Show)] //! #[derive(Debug)]
//! enum Version { Version1, Version2 } //! enum Version { Version1, Version2 }
//! //!
//! fn parse_version(header: &[u8]) -> Result<Version, &'static str> { //! fn parse_version(header: &[u8]) -> Result<Version, &'static str> {
@ -239,7 +239,7 @@ use slice;
/// `Result` is a type that represents either success (`Ok`) or failure (`Err`). /// `Result` is a type that represents either success (`Ok`) or failure (`Err`).
/// ///
/// See the [`std::result`](index.html) module documentation for details. /// See the [`std::result`](index.html) module documentation for details.
#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Show, Hash)] #[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)]
#[must_use] #[must_use]
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub enum Result<T, E> { pub enum Result<T, E> {

View file

@ -38,7 +38,7 @@
#[unstable(feature = "core")] #[unstable(feature = "core")]
#[simd] #[simd]
#[derive(Copy, Show)] #[derive(Copy, Debug)]
#[repr(C)] #[repr(C)]
pub struct i8x16(pub i8, pub i8, pub i8, pub i8, pub struct i8x16(pub i8, pub i8, pub i8, pub i8,
pub i8, pub i8, pub i8, pub i8, pub i8, pub i8, pub i8, pub i8,
@ -47,26 +47,26 @@ pub struct i8x16(pub i8, pub i8, pub i8, pub i8,
#[unstable(feature = "core")] #[unstable(feature = "core")]
#[simd] #[simd]
#[derive(Copy, Show)] #[derive(Copy, Debug)]
#[repr(C)] #[repr(C)]
pub struct i16x8(pub i16, pub i16, pub i16, pub i16, pub struct i16x8(pub i16, pub i16, pub i16, pub i16,
pub i16, pub i16, pub i16, pub i16); pub i16, pub i16, pub i16, pub i16);
#[unstable(feature = "core")] #[unstable(feature = "core")]
#[simd] #[simd]
#[derive(Copy, Show)] #[derive(Copy, Debug)]
#[repr(C)] #[repr(C)]
pub struct i32x4(pub i32, pub i32, pub i32, pub i32); pub struct i32x4(pub i32, pub i32, pub i32, pub i32);
#[unstable(feature = "core")] #[unstable(feature = "core")]
#[simd] #[simd]
#[derive(Copy, Show)] #[derive(Copy, Debug)]
#[repr(C)] #[repr(C)]
pub struct i64x2(pub i64, pub i64); pub struct i64x2(pub i64, pub i64);
#[unstable(feature = "core")] #[unstable(feature = "core")]
#[simd] #[simd]
#[derive(Copy, Show)] #[derive(Copy, Debug)]
#[repr(C)] #[repr(C)]
pub struct u8x16(pub u8, pub u8, pub u8, pub u8, pub struct u8x16(pub u8, pub u8, pub u8, pub u8,
pub u8, pub u8, pub u8, pub u8, pub u8, pub u8, pub u8, pub u8,
@ -75,31 +75,31 @@ pub struct u8x16(pub u8, pub u8, pub u8, pub u8,
#[unstable(feature = "core")] #[unstable(feature = "core")]
#[simd] #[simd]
#[derive(Copy, Show)] #[derive(Copy, Debug)]
#[repr(C)] #[repr(C)]
pub struct u16x8(pub u16, pub u16, pub u16, pub u16, pub struct u16x8(pub u16, pub u16, pub u16, pub u16,
pub u16, pub u16, pub u16, pub u16); pub u16, pub u16, pub u16, pub u16);
#[unstable(feature = "core")] #[unstable(feature = "core")]
#[simd] #[simd]
#[derive(Copy, Show)] #[derive(Copy, Debug)]
#[repr(C)] #[repr(C)]
pub struct u32x4(pub u32, pub u32, pub u32, pub u32); pub struct u32x4(pub u32, pub u32, pub u32, pub u32);
#[unstable(feature = "core")] #[unstable(feature = "core")]
#[simd] #[simd]
#[derive(Copy, Show)] #[derive(Copy, Debug)]
#[repr(C)] #[repr(C)]
pub struct u64x2(pub u64, pub u64); pub struct u64x2(pub u64, pub u64);
#[unstable(feature = "core")] #[unstable(feature = "core")]
#[simd] #[simd]
#[derive(Copy, Show)] #[derive(Copy, Debug)]
#[repr(C)] #[repr(C)]
pub struct f32x4(pub f32, pub f32, pub f32, pub f32); pub struct f32x4(pub f32, pub f32, pub f32, pub f32);
#[unstable(feature = "core")] #[unstable(feature = "core")]
#[simd] #[simd]
#[derive(Copy, Show)] #[derive(Copy, Debug)]
#[repr(C)] #[repr(C)]
pub struct f64x2(pub f64, pub f64); pub struct f64x2(pub f64, pub f64);

View file

@ -482,7 +482,7 @@ impl<T> SliceExt for [T] {
let min = cmp::min(self.len(), src.len()); let min = cmp::min(self.len(), src.len());
let dst = &mut self[.. min]; let dst = &mut self[.. min];
let src = &src[.. min]; let src = &src[.. min];
for i in range(0, min) { for i in 0..min {
dst[i].clone_from(&src[i]); dst[i].clone_from(&src[i]);
} }
min min

View file

@ -23,7 +23,6 @@ use default::Default;
use error::Error; use error::Error;
use fmt; use fmt;
use iter::ExactSizeIterator; use iter::ExactSizeIterator;
use iter::range;
use iter::{Map, Iterator, IteratorExt, DoubleEndedIterator}; use iter::{Map, Iterator, IteratorExt, DoubleEndedIterator};
use marker::Sized; use marker::Sized;
use mem; use mem;
@ -145,7 +144,7 @@ Section: Creating a string
*/ */
/// Errors which can occur when attempting to interpret a byte slice as a `str`. /// Errors which can occur when attempting to interpret a byte slice as a `str`.
#[derive(Copy, Eq, PartialEq, Clone, Show)] #[derive(Copy, Eq, PartialEq, Clone, Debug)]
#[unstable(feature = "core", #[unstable(feature = "core",
reason = "error enumeration recently added and definitions may be refined")] reason = "error enumeration recently added and definitions may be refined")]
pub enum Utf8Error { pub enum Utf8Error {
@ -800,7 +799,7 @@ impl TwoWaySearcher {
// See if the right part of the needle matches // See if the right part of the needle matches
let start = if long_period { self.crit_pos } let start = if long_period { self.crit_pos }
else { cmp::max(self.crit_pos, self.memory) }; else { cmp::max(self.crit_pos, self.memory) };
for i in range(start, needle.len()) { for i in start..needle.len() {
if needle[i] != haystack[self.position + i] { if needle[i] != haystack[self.position + i] {
self.position += i - self.crit_pos + 1; self.position += i - self.crit_pos + 1;
if !long_period { if !long_period {
@ -812,7 +811,7 @@ impl TwoWaySearcher {
// See if the left part of the needle matches // See if the left part of the needle matches
let start = if long_period { 0 } else { self.memory }; let start = if long_period { 0 } else { self.memory };
for i in range(start, self.crit_pos).rev() { for i in (start..self.crit_pos).rev() {
if needle[i] != haystack[self.position + i] { if needle[i] != haystack[self.position + i] {
self.position += self.period; self.position += self.period;
if !long_period { if !long_period {

View file

@ -11,7 +11,7 @@ use core::any::*;
use test::Bencher; use test::Bencher;
use test; use test;
#[derive(PartialEq, Show)] #[derive(PartialEq, Debug)]
struct Test; struct Test;
static TEST: &'static str = "Test"; static TEST: &'static str = "Test";

View file

@ -305,7 +305,7 @@ fn test_cycle() {
#[test] #[test]
fn test_iterator_nth() { fn test_iterator_nth() {
let v: &[_] = &[0i, 1, 2, 3, 4]; let v: &[_] = &[0i, 1, 2, 3, 4];
for i in range(0u, v.len()) { for i in 0u..v.len() {
assert_eq!(v.iter().nth(i).unwrap(), &v[i]); assert_eq!(v.iter().nth(i).unwrap(), &v[i]);
} }
assert_eq!(v.iter().nth(v.len()), None); assert_eq!(v.iter().nth(v.len()), None);
@ -458,7 +458,7 @@ fn test_min_by() {
#[test] #[test]
fn test_by_ref() { fn test_by_ref() {
let mut xs = range(0i, 10); let mut xs = 0i..10;
// sum the first five values // sum the first five values
let partial_sum = xs.by_ref().take(5).fold(0, |a, b| a + b); let partial_sum = xs.by_ref().take(5).fold(0, |a, b| a + b);
assert_eq!(partial_sum, 10); assert_eq!(partial_sum, 10);
@ -730,32 +730,32 @@ fn test_random_access_cycle() {
#[test] #[test]
fn test_double_ended_range() { fn test_double_ended_range() {
assert!(range(11i, 14).rev().collect::<Vec<int>>() == vec![13i, 12, 11]); assert!((11i..14).rev().collect::<Vec<int>>() == vec![13i, 12, 11]);
for _ in range(10i, 0).rev() { for _ in (10i..0).rev() {
panic!("unreachable"); panic!("unreachable");
} }
assert!(range(11u, 14).rev().collect::<Vec<uint>>() == vec![13u, 12, 11]); assert!((11u..14).rev().collect::<Vec<uint>>() == vec![13u, 12, 11]);
for _ in range(10u, 0).rev() { for _ in (10u..0).rev() {
panic!("unreachable"); panic!("unreachable");
} }
} }
#[test] #[test]
fn test_range() { fn test_range() {
assert!(range(0i, 5).collect::<Vec<int>>() == vec![0i, 1, 2, 3, 4]); assert!((0i..5).collect::<Vec<int>>() == vec![0i, 1, 2, 3, 4]);
assert!(range(-10i, -1).collect::<Vec<int>>() == assert!((-10i..-1).collect::<Vec<int>>() ==
vec![-10, -9, -8, -7, -6, -5, -4, -3, -2]); vec![-10, -9, -8, -7, -6, -5, -4, -3, -2]);
assert!(range(0i, 5).rev().collect::<Vec<int>>() == vec![4, 3, 2, 1, 0]); assert!((0i..5).rev().collect::<Vec<int>>() == vec![4, 3, 2, 1, 0]);
assert_eq!(range(200i, -5).count(), 0); assert_eq!((200i..-5).count(), 0);
assert_eq!(range(200i, -5).rev().count(), 0); assert_eq!((200i..-5).rev().count(), 0);
assert_eq!(range(200i, 200).count(), 0); assert_eq!((200i..200).count(), 0);
assert_eq!(range(200i, 200).rev().count(), 0); assert_eq!((200i..200).rev().count(), 0);
assert_eq!(range(0i, 100).size_hint(), (100, Some(100))); assert_eq!((0i..100).size_hint(), (100, Some(100)));
// this test is only meaningful when sizeof uint < sizeof u64 // this test is only meaningful when sizeof uint < sizeof u64
assert_eq!(range(uint::MAX - 1, uint::MAX).size_hint(), (1, Some(1))); assert_eq!((uint::MAX - 1..uint::MAX).size_hint(), (1, Some(1)));
assert_eq!(range(-10i, -1).size_hint(), (9, Some(9))); assert_eq!((-10i..-1).size_hint(), (9, Some(9)));
} }
#[test] #[test]
@ -883,7 +883,7 @@ fn test_fuse() {
#[bench] #[bench]
fn bench_rposition(b: &mut Bencher) { fn bench_rposition(b: &mut Bencher) {
let it: Vec<uint> = range(0u, 300).collect(); let it: Vec<uint> = (0u..300).collect();
b.iter(|| { b.iter(|| {
it.iter().rposition(|&x| x <= 150); it.iter().rposition(|&x| x <= 150);
}); });
@ -892,7 +892,7 @@ fn bench_rposition(b: &mut Bencher) {
#[bench] #[bench]
fn bench_skip_while(b: &mut Bencher) { fn bench_skip_while(b: &mut Bencher) {
b.iter(|| { b.iter(|| {
let it = range(0u, 100); let it = 0u..100;
let mut sum = 0; let mut sum = 0;
it.skip_while(|&x| { sum += x; sum < 4000 }).all(|_| true); it.skip_while(|&x| { sum += x; sum < 4000 }).all(|_| true);
}); });
@ -900,10 +900,10 @@ fn bench_skip_while(b: &mut Bencher) {
#[bench] #[bench]
fn bench_multiple_take(b: &mut Bencher) { fn bench_multiple_take(b: &mut Bencher) {
let mut it = range(0u, 42).cycle(); let mut it = (0u..42).cycle();
b.iter(|| { b.iter(|| {
let n = it.next().unwrap(); let n = it.next().unwrap();
for _ in range(0u, n) { for _ in 0u..n {
it.take(it.next().unwrap()).all(|_| true); it.take(it.next().unwrap()).all(|_| true);
} }
}); });

View file

@ -223,13 +223,13 @@ fn test_ord() {
/* FIXME(#20575) /* FIXME(#20575)
#[test] #[test]
fn test_collect() { fn test_collect() {
let v: Option<Vec<int>> = range(0i, 0).map(|_| Some(0i)).collect(); let v: Option<Vec<int>> = (0i..0).map(|_| Some(0i)).collect();
assert!(v == Some(vec![])); assert!(v == Some(vec![]));
let v: Option<Vec<int>> = range(0i, 3).map(|x| Some(x)).collect(); let v: Option<Vec<int>> = (0i..3).map(|x| Some(x)).collect();
assert!(v == Some(vec![0, 1, 2])); assert!(v == Some(vec![0, 1, 2]));
let v: Option<Vec<int>> = range(0i, 3).map(|x| { let v: Option<Vec<int>> = (0i..3).map(|x| {
if x > 1 { None } else { Some(x) } if x > 1 { None } else { Some(x) }
}).collect(); }).collect();
assert!(v == None); assert!(v == None);

View file

@ -68,13 +68,13 @@ pub fn test_impl_map_err() {
/* FIXME(#20575) /* FIXME(#20575)
#[test] #[test]
fn test_collect() { fn test_collect() {
let v: Result<Vec<int>, ()> = range(0i, 0).map(|_| Ok::<int, ()>(0)).collect(); let v: Result<Vec<int>, ()> = (0i..0).map(|_| Ok::<int, ()>(0)).collect();
assert!(v == Ok(vec![])); assert!(v == Ok(vec![]));
let v: Result<Vec<int>, ()> = range(0i, 3).map(|x| Ok::<int, ()>(x)).collect(); let v: Result<Vec<int>, ()> = (0i..3).map(|x| Ok::<int, ()>(x)).collect();
assert!(v == Ok(vec![0, 1, 2])); assert!(v == Ok(vec![0, 1, 2]));
let v: Result<Vec<int>, int> = range(0i, 3).map(|x| { let v: Result<Vec<int>, int> = (0i..3).map(|x| {
if x > 1 { Err(x) } else { Ok(x) } if x > 1 { Err(x) } else { Ok(x) }
}).collect(); }).collect();
assert!(v == Err(2)); assert!(v == Err(2));

View file

@ -17,7 +17,7 @@ fn test_bool_from_str() {
fn check_contains_all_substrings(s: &str) { fn check_contains_all_substrings(s: &str) {
assert!(s.contains("")); assert!(s.contains(""));
for i in range(0, s.len()) { for i in 0..s.len() {
for j in range(i+1, s.len() + 1) { for j in range(i+1, s.len() + 1) {
assert!(s.contains(&s[i..j])); assert!(s.contains(&s[i..j]));
} }

View file

@ -138,14 +138,14 @@ mod tests {
fn test_flate_round_trip() { fn test_flate_round_trip() {
let mut r = rand::thread_rng(); let mut r = rand::thread_rng();
let mut words = vec!(); let mut words = vec!();
for _ in range(0u, 20) { for _ in 0u..20 {
let range = r.gen_range(1u, 10); let range = r.gen_range(1u, 10);
let v = r.gen_iter::<u8>().take(range).collect::<Vec<u8>>(); let v = r.gen_iter::<u8>().take(range).collect::<Vec<u8>>();
words.push(v); words.push(v);
} }
for _ in range(0u, 20) { for _ in 0u..20 {
let mut input = vec![]; let mut input = vec![];
for _ in range(0u, 2000) { for _ in 0u..2000 {
input.push_all(r.choose(words.as_slice()).unwrap().as_slice()); input.push_all(r.choose(words.as_slice()).unwrap().as_slice());
} }
debug!("de/inflate of {} bytes of random word-sequences", debug!("de/inflate of {} bytes of random word-sequences",

View file

@ -111,7 +111,7 @@ use std::iter::repeat;
use std::result; use std::result;
/// Name of an option. Either a string or a single char. /// Name of an option. Either a string or a single char.
#[derive(Clone, PartialEq, Eq, Show)] #[derive(Clone, PartialEq, Eq, Debug)]
pub enum Name { pub enum Name {
/// A string representing the long name of an option. /// A string representing the long name of an option.
/// For example: "help" /// For example: "help"
@ -122,7 +122,7 @@ pub enum Name {
} }
/// Describes whether an option has an argument. /// Describes whether an option has an argument.
#[derive(Clone, Copy, PartialEq, Eq, Show)] #[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum HasArg { pub enum HasArg {
/// The option requires an argument. /// The option requires an argument.
Yes, Yes,
@ -133,7 +133,7 @@ pub enum HasArg {
} }
/// Describes how often an option may occur. /// Describes how often an option may occur.
#[derive(Clone, Copy, PartialEq, Eq, Show)] #[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum Occur { pub enum Occur {
/// The option occurs once. /// The option occurs once.
Req, Req,
@ -144,7 +144,7 @@ pub enum Occur {
} }
/// A description of a possible option. /// A description of a possible option.
#[derive(Clone, PartialEq, Eq, Show)] #[derive(Clone, PartialEq, Eq, Debug)]
pub struct Opt { pub struct Opt {
/// Name of the option /// Name of the option
pub name: Name, pub name: Name,
@ -158,7 +158,7 @@ pub struct Opt {
/// One group of options, e.g., both `-h` and `--help`, along with /// One group of options, e.g., both `-h` and `--help`, along with
/// their shared description and properties. /// their shared description and properties.
#[derive(Clone, PartialEq, Eq, Show)] #[derive(Clone, PartialEq, Eq, Debug)]
pub struct OptGroup { pub struct OptGroup {
/// Short name of the option, e.g. `h` for a `-h` option /// Short name of the option, e.g. `h` for a `-h` option
pub short_name: String, pub short_name: String,
@ -175,7 +175,7 @@ pub struct OptGroup {
} }
/// Describes whether an option is given at all or has a value. /// Describes whether an option is given at all or has a value.
#[derive(Clone, PartialEq, Eq, Show)] #[derive(Clone, PartialEq, Eq, Debug)]
enum Optval { enum Optval {
Val(String), Val(String),
Given, Given,
@ -183,7 +183,7 @@ enum Optval {
/// The result of checking command line arguments. Contains a vector /// The result of checking command line arguments. Contains a vector
/// of matches and a vector of free strings. /// of matches and a vector of free strings.
#[derive(Clone, PartialEq, Eq, Show)] #[derive(Clone, PartialEq, Eq, Debug)]
pub struct Matches { pub struct Matches {
/// Options that matched /// Options that matched
opts: Vec<Opt>, opts: Vec<Opt>,
@ -196,7 +196,7 @@ pub struct Matches {
/// The type returned when the command line does not conform to the /// The type returned when the command line does not conform to the
/// expected format. Use the `Show` implementation to output detailed /// expected format. Use the `Show` implementation to output detailed
/// information. /// information.
#[derive(Clone, PartialEq, Eq, Show)] #[derive(Clone, PartialEq, Eq, Debug)]
pub enum Fail { pub enum Fail {
/// The option requires an argument but none was passed. /// The option requires an argument but none was passed.
ArgumentMissing(String), ArgumentMissing(String),
@ -211,7 +211,7 @@ pub enum Fail {
} }
/// The type of failure that occurred. /// The type of failure that occurred.
#[derive(Copy, PartialEq, Eq, Show)] #[derive(Copy, PartialEq, Eq, Debug)]
#[allow(missing_docs)] #[allow(missing_docs)]
pub enum FailType { pub enum FailType {
ArgumentMissing_, ArgumentMissing_,
@ -586,7 +586,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result {
fn f(_x: uint) -> Vec<Optval> { return Vec::new(); } fn f(_x: uint) -> Vec<Optval> { return Vec::new(); }
let mut vals: Vec<_> = range(0, n_opts).map(f).collect(); let mut vals: Vec<_> = (0..n_opts).map(f).collect();
let mut free: Vec<String> = Vec::new(); let mut free: Vec<String> = Vec::new();
let l = args.len(); let l = args.len();
let mut i = 0; let mut i = 0;
@ -693,7 +693,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result {
} }
i += 1; i += 1;
} }
for i in range(0u, n_opts) { for i in 0u..n_opts {
let n = vals[i].len(); let n = vals[i].len();
let occ = opts[i].occur; let occ = opts[i].occur;
if occ == Req && n == 0 { if occ == Req && n == 0 {
@ -761,7 +761,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String {
// here we just need to indent the start of the description // here we just need to indent the start of the description
let rowlen = row.chars().count(); let rowlen = row.chars().count();
if rowlen < 24 { if rowlen < 24 {
for _ in range(0, 24 - rowlen) { for _ in 0..24 - rowlen {
row.push(' '); row.push(' ');
} }
} else { } else {

View file

@ -176,7 +176,7 @@
//! } //! }
//! //!
//! impl<'a> dot::GraphWalk<'a, Nd, Ed<'a>> for Graph { //! impl<'a> dot::GraphWalk<'a, Nd, Ed<'a>> for Graph {
//! fn nodes(&self) -> dot::Nodes<'a,Nd> { range(0,self.nodes.len()).collect() } //! fn nodes(&self) -> dot::Nodes<'a,Nd> { (0..self.nodes.len()).collect() }
//! fn edges(&'a self) -> dot::Edges<'a,Ed<'a>> { self.edges.iter().collect() } //! fn edges(&'a self) -> dot::Edges<'a,Ed<'a>> { self.edges.iter().collect() }
//! fn source(&self, e: &Ed) -> Nd { let & &(s,_) = e; s } //! fn source(&self, e: &Ed) -> Nd { let & &(s,_) = e; s }
//! fn target(&self, e: &Ed) -> Nd { let & &(_,t) = e; t } //! fn target(&self, e: &Ed) -> Nd { let & &(_,t) = e; t }
@ -523,7 +523,7 @@ pub trait GraphWalk<'a, N, E> {
fn target(&'a self, edge: &E) -> N; fn target(&'a self, edge: &E) -> N;
} }
#[derive(Copy, PartialEq, Eq, Show)] #[derive(Copy, PartialEq, Eq, Debug)]
pub enum RenderOption { pub enum RenderOption {
NoEdgeLabels, NoEdgeLabels,
NoNodeLabels, NoNodeLabels,
@ -715,7 +715,7 @@ mod tests {
impl<'a> GraphWalk<'a, Node, &'a Edge> for LabelledGraph { impl<'a> GraphWalk<'a, Node, &'a Edge> for LabelledGraph {
fn nodes(&'a self) -> Nodes<'a,Node> { fn nodes(&'a self) -> Nodes<'a,Node> {
range(0u, self.node_labels.len()).collect() (0u..self.node_labels.len()).collect()
} }
fn edges(&'a self) -> Edges<'a,&'a Edge> { fn edges(&'a self) -> Edges<'a,&'a Edge> {
self.edges.iter().collect() self.edges.iter().collect()

View file

@ -11,7 +11,7 @@
use std::ascii::AsciiExt; use std::ascii::AsciiExt;
use std::cmp; use std::cmp;
#[derive(Show, Clone)] #[derive(Debug, Clone)]
pub struct LogDirective { pub struct LogDirective {
pub name: Option<String>, pub name: Option<String>,
pub level: u32, pub level: u32,

View file

@ -243,7 +243,7 @@ struct DefaultLogger {
} }
/// Wraps the log level with fmt implementations. /// Wraps the log level with fmt implementations.
#[derive(Copy, PartialEq, PartialOrd, Show)] #[derive(Copy, PartialEq, PartialOrd, Debug)]
pub struct LogLevel(pub u32); pub struct LogLevel(pub u32);
impl fmt::Display for LogLevel { impl fmt::Display for LogLevel {
@ -330,7 +330,7 @@ pub fn set_logger(logger: Box<Logger + Send>) -> Option<Box<Logger + Send>> {
/// A LogRecord is created by the logging macros, and passed as the only /// A LogRecord is created by the logging macros, and passed as the only
/// argument to Loggers. /// argument to Loggers.
#[derive(Show)] #[derive(Debug)]
pub struct LogRecord<'a> { pub struct LogRecord<'a> {
/// The module path of where the LogRecord originated. /// The module path of where the LogRecord originated.

View file

@ -69,11 +69,11 @@ macro_rules! double_round{
fn core(output: &mut [u32; STATE_WORDS], input: &[u32; STATE_WORDS]) { fn core(output: &mut [u32; STATE_WORDS], input: &[u32; STATE_WORDS]) {
*output = *input; *output = *input;
for _ in range(0, CHACHA_ROUNDS / 2) { for _ in 0..CHACHA_ROUNDS / 2 {
double_round!(output); double_round!(output);
} }
for i in range(0, STATE_WORDS) { for i in 0..STATE_WORDS {
output[i] += input[i]; output[i] += input[i];
} }
} }
@ -128,7 +128,7 @@ impl ChaChaRng {
self.state[2] = 0x79622D32; self.state[2] = 0x79622D32;
self.state[3] = 0x6B206574; self.state[3] = 0x6B206574;
for i in range(0, KEY_WORDS) { for i in 0..KEY_WORDS {
self.state[4+i] = key[i]; self.state[4+i] = key[i];
} }
@ -247,14 +247,14 @@ mod test {
let seed : &[_] = &[0u32; 8]; let seed : &[_] = &[0u32; 8];
let mut ra: ChaChaRng = SeedableRng::from_seed(seed); let mut ra: ChaChaRng = SeedableRng::from_seed(seed);
let v = range(0, 16).map(|_| ra.next_u32()).collect::<Vec<_>>(); let v = (0..16).map(|_| ra.next_u32()).collect::<Vec<_>>();
assert_eq!(v, assert_eq!(v,
vec!(0xade0b876, 0x903df1a0, 0xe56a5d40, 0x28bd8653, vec!(0xade0b876, 0x903df1a0, 0xe56a5d40, 0x28bd8653,
0xb819d2bd, 0x1aed8da0, 0xccef36a8, 0xc70d778b, 0xb819d2bd, 0x1aed8da0, 0xccef36a8, 0xc70d778b,
0x7c5941da, 0x8d485751, 0x3fe02477, 0x374ad8b8, 0x7c5941da, 0x8d485751, 0x3fe02477, 0x374ad8b8,
0xf4b8436a, 0x1ca11815, 0x69b687c3, 0x8665eeb2)); 0xf4b8436a, 0x1ca11815, 0x69b687c3, 0x8665eeb2));
let v = range(0, 16).map(|_| ra.next_u32()).collect::<Vec<_>>(); let v = (0..16).map(|_| ra.next_u32()).collect::<Vec<_>>();
assert_eq!(v, assert_eq!(v,
vec!(0xbee7079f, 0x7a385155, 0x7c97ba98, 0x0d082d73, vec!(0xbee7079f, 0x7a385155, 0x7c97ba98, 0x0d082d73,
0xa0290fcb, 0x6965e348, 0x3e53c612, 0xed7aee32, 0xa0290fcb, 0x6965e348, 0x3e53c612, 0xed7aee32,
@ -268,9 +268,9 @@ mod test {
// Store the 17*i-th 32-bit word, // Store the 17*i-th 32-bit word,
// i.e., the i-th word of the i-th 16-word block // i.e., the i-th word of the i-th 16-word block
let mut v : Vec<u32> = Vec::new(); let mut v : Vec<u32> = Vec::new();
for _ in range(0u, 16) { for _ in 0u..16 {
v.push(ra.next_u32()); v.push(ra.next_u32());
for _ in range(0u, 16) { for _ in 0u..16 {
ra.next_u32(); ra.next_u32();
} }
} }
@ -287,7 +287,7 @@ mod test {
let seed : &[_] = &[0u32; 8]; let seed : &[_] = &[0u32; 8];
let mut rng: ChaChaRng = SeedableRng::from_seed(seed); let mut rng: ChaChaRng = SeedableRng::from_seed(seed);
let mut clone = rng.clone(); let mut clone = rng.clone();
for _ in range(0u, 16) { for _ in 0u..16 {
assert_eq!(rng.next_u64(), clone.next_u64()); assert_eq!(rng.next_u64(), clone.next_u64());
} }
} }

View file

@ -103,7 +103,7 @@ mod test {
fn test_exp() { fn test_exp() {
let mut exp = Exp::new(10.0); let mut exp = Exp::new(10.0);
let mut rng = ::test::rng(); let mut rng = ::test::rng();
for _ in range(0u, 1000) { for _ in 0u..1000 {
assert!(exp.sample(&mut rng) >= 0.0); assert!(exp.sample(&mut rng) >= 0.0);
assert!(exp.ind_sample(&mut rng) >= 0.0); assert!(exp.ind_sample(&mut rng) >= 0.0);
} }
@ -137,7 +137,7 @@ mod bench {
let mut exp = Exp::new(2.71828 * 3.14159); let mut exp = Exp::new(2.71828 * 3.14159);
b.iter(|| { b.iter(|| {
for _ in range(0, ::RAND_BENCH_N) { for _ in 0..::RAND_BENCH_N {
exp.sample(&mut rng); exp.sample(&mut rng);
} }
}); });

View file

@ -332,7 +332,7 @@ mod test {
fn test_chi_squared_one() { fn test_chi_squared_one() {
let mut chi = ChiSquared::new(1.0); let mut chi = ChiSquared::new(1.0);
let mut rng = ::test::rng(); let mut rng = ::test::rng();
for _ in range(0u, 1000) { for _ in 0u..1000 {
chi.sample(&mut rng); chi.sample(&mut rng);
chi.ind_sample(&mut rng); chi.ind_sample(&mut rng);
} }
@ -341,7 +341,7 @@ mod test {
fn test_chi_squared_small() { fn test_chi_squared_small() {
let mut chi = ChiSquared::new(0.5); let mut chi = ChiSquared::new(0.5);
let mut rng = ::test::rng(); let mut rng = ::test::rng();
for _ in range(0u, 1000) { for _ in 0u..1000 {
chi.sample(&mut rng); chi.sample(&mut rng);
chi.ind_sample(&mut rng); chi.ind_sample(&mut rng);
} }
@ -350,7 +350,7 @@ mod test {
fn test_chi_squared_large() { fn test_chi_squared_large() {
let mut chi = ChiSquared::new(30.0); let mut chi = ChiSquared::new(30.0);
let mut rng = ::test::rng(); let mut rng = ::test::rng();
for _ in range(0u, 1000) { for _ in 0u..1000 {
chi.sample(&mut rng); chi.sample(&mut rng);
chi.ind_sample(&mut rng); chi.ind_sample(&mut rng);
} }
@ -365,7 +365,7 @@ mod test {
fn test_f() { fn test_f() {
let mut f = FisherF::new(2.0, 32.0); let mut f = FisherF::new(2.0, 32.0);
let mut rng = ::test::rng(); let mut rng = ::test::rng();
for _ in range(0u, 1000) { for _ in 0u..1000 {
f.sample(&mut rng); f.sample(&mut rng);
f.ind_sample(&mut rng); f.ind_sample(&mut rng);
} }
@ -375,7 +375,7 @@ mod test {
fn test_t() { fn test_t() {
let mut t = StudentT::new(11.0); let mut t = StudentT::new(11.0);
let mut rng = ::test::rng(); let mut rng = ::test::rng();
for _ in range(0u, 1000) { for _ in 0u..1000 {
t.sample(&mut rng); t.sample(&mut rng);
t.ind_sample(&mut rng); t.ind_sample(&mut rng);
} }
@ -398,7 +398,7 @@ mod bench {
let mut rng = ::test::weak_rng(); let mut rng = ::test::weak_rng();
b.iter(|| { b.iter(|| {
for _ in range(0, ::RAND_BENCH_N) { for _ in 0..::RAND_BENCH_N {
gamma.ind_sample(&mut rng); gamma.ind_sample(&mut rng);
} }
}); });
@ -411,7 +411,7 @@ mod bench {
let mut rng = ::test::weak_rng(); let mut rng = ::test::weak_rng();
b.iter(|| { b.iter(|| {
for _ in range(0, ::RAND_BENCH_N) { for _ in 0..::RAND_BENCH_N {
gamma.ind_sample(&mut rng); gamma.ind_sample(&mut rng);
} }
}); });

View file

@ -97,7 +97,7 @@ pub struct Weighted<T> {
/// Weighted { weight: 1, item: 'c' }); /// Weighted { weight: 1, item: 'c' });
/// let wc = WeightedChoice::new(items.as_mut_slice()); /// let wc = WeightedChoice::new(items.as_mut_slice());
/// let mut rng = rand::thread_rng(); /// let mut rng = rand::thread_rng();
/// for _ in range(0u, 16) { /// for _ in 0u..16 {
/// // on average prints 'a' 4 times, 'b' 8 and 'c' twice. /// // on average prints 'a' 4 times, 'b' 8 and 'c' twice.
/// println!("{}", wc.ind_sample(&mut rng)); /// println!("{}", wc.ind_sample(&mut rng));
/// } /// }
@ -263,7 +263,7 @@ mod tests {
use {Rng, Rand}; use {Rng, Rand};
use super::{RandSample, WeightedChoice, Weighted, Sample, IndependentSample}; use super::{RandSample, WeightedChoice, Weighted, Sample, IndependentSample};
#[derive(PartialEq, Show)] #[derive(PartialEq, Debug)]
struct ConstRand(uint); struct ConstRand(uint);
impl Rand for ConstRand { impl Rand for ConstRand {
fn rand<R: Rng>(_: &mut R) -> ConstRand { fn rand<R: Rng>(_: &mut R) -> ConstRand {

View file

@ -169,7 +169,7 @@ mod tests {
fn test_normal() { fn test_normal() {
let mut norm = Normal::new(10.0, 10.0); let mut norm = Normal::new(10.0, 10.0);
let mut rng = ::test::rng(); let mut rng = ::test::rng();
for _ in range(0u, 1000) { for _ in 0u..1000 {
norm.sample(&mut rng); norm.sample(&mut rng);
norm.ind_sample(&mut rng); norm.ind_sample(&mut rng);
} }
@ -185,7 +185,7 @@ mod tests {
fn test_log_normal() { fn test_log_normal() {
let mut lnorm = LogNormal::new(10.0, 10.0); let mut lnorm = LogNormal::new(10.0, 10.0);
let mut rng = ::test::rng(); let mut rng = ::test::rng();
for _ in range(0u, 1000) { for _ in 0u..1000 {
lnorm.sample(&mut rng); lnorm.sample(&mut rng);
lnorm.ind_sample(&mut rng); lnorm.ind_sample(&mut rng);
} }
@ -212,7 +212,7 @@ mod bench {
let mut normal = Normal::new(-2.71828, 3.14159); let mut normal = Normal::new(-2.71828, 3.14159);
b.iter(|| { b.iter(|| {
for _ in range(0, ::RAND_BENCH_N) { for _ in 0..::RAND_BENCH_N {
normal.sample(&mut rng); normal.sample(&mut rng);
} }
}); });

View file

@ -41,7 +41,7 @@ use distributions::{Sample, IndependentSample};
/// let between = Range::new(10u, 10000u); /// let between = Range::new(10u, 10000u);
/// let mut rng = std::rand::thread_rng(); /// let mut rng = std::rand::thread_rng();
/// let mut sum = 0; /// let mut sum = 0;
/// for _ in range(0u, 1000) { /// for _ in 0u..1000 {
/// sum += between.ind_sample(&mut rng); /// sum += between.ind_sample(&mut rng);
/// } /// }
/// println!("{}", sum); /// println!("{}", sum);
@ -190,7 +190,7 @@ mod tests {
(Int::min_value(), Int::max_value())]; (Int::min_value(), Int::max_value())];
for &(low, high) in v.iter() { for &(low, high) in v.iter() {
let mut sampler: Range<$ty> = Range::new(low, high); let mut sampler: Range<$ty> = Range::new(low, high);
for _ in range(0u, 1000) { for _ in 0u..1000 {
let v = sampler.sample(&mut rng); let v = sampler.sample(&mut rng);
assert!(low <= v && v < high); assert!(low <= v && v < high);
let v = sampler.ind_sample(&mut rng); let v = sampler.ind_sample(&mut rng);
@ -216,7 +216,7 @@ mod tests {
(-1e35, 1e35)]; (-1e35, 1e35)];
for &(low, high) in v.iter() { for &(low, high) in v.iter() {
let mut sampler: Range<$ty> = Range::new(low, high); let mut sampler: Range<$ty> = Range::new(low, high);
for _ in range(0u, 1000) { for _ in 0u..1000 {
let v = sampler.sample(&mut rng); let v = sampler.sample(&mut rng);
assert!(low <= v && v < high); assert!(low <= v && v < high);
let v = sampler.ind_sample(&mut rng); let v = sampler.ind_sample(&mut rng);

View file

@ -82,7 +82,7 @@ impl IsaacRng {
}} }}
} }
for _ in range(0u, 4) { for _ in 0u..4 {
mix!(); mix!();
} }
@ -323,14 +323,14 @@ impl Isaac64Rng {
}} }}
} }
for _ in range(0u, 4) { for _ in 0u..4 {
mix!(); mix!();
} }
if use_rsl { if use_rsl {
macro_rules! memloop { macro_rules! memloop {
($arr:expr) => {{ ($arr:expr) => {{
for i in range(0, RAND_SIZE_64 / 8).map(|i| i * 8) { for i in (0..RAND_SIZE_64 / 8).map(|i| i * 8) {
a+=$arr[i ]; b+=$arr[i+1]; a+=$arr[i ]; b+=$arr[i+1];
c+=$arr[i+2]; d+=$arr[i+3]; c+=$arr[i+2]; d+=$arr[i+3];
e+=$arr[i+4]; f+=$arr[i+5]; e+=$arr[i+4]; f+=$arr[i+5];
@ -347,7 +347,7 @@ impl Isaac64Rng {
memloop!(self.rsl); memloop!(self.rsl);
memloop!(self.mem); memloop!(self.mem);
} else { } else {
for i in range(0, RAND_SIZE_64 / 8).map(|i| i * 8) { for i in (0..RAND_SIZE_64 / 8).map(|i| i * 8) {
mix!(); mix!();
self.mem[i ]=a; self.mem[i+1]=b; self.mem[i ]=a; self.mem[i+1]=b;
self.mem[i+2]=c; self.mem[i+3]=d; self.mem[i+2]=c; self.mem[i+3]=d;
@ -374,7 +374,7 @@ impl Isaac64Rng {
} }
for &(mr_offset, m2_offset) in MP_VEC.iter() { for &(mr_offset, m2_offset) in MP_VEC.iter() {
for base in range(0, MIDPOINT / 4).map(|i| i * 4) { for base in (0..MIDPOINT / 4).map(|i| i * 4) {
macro_rules! rngstepp { macro_rules! rngstepp {
($j:expr, $shift:expr) => {{ ($j:expr, $shift:expr) => {{
@ -573,7 +573,7 @@ mod test {
let seed: &[_] = &[1, 23, 456, 7890, 12345]; let seed: &[_] = &[1, 23, 456, 7890, 12345];
let mut ra: IsaacRng = SeedableRng::from_seed(seed); let mut ra: IsaacRng = SeedableRng::from_seed(seed);
// Regression test that isaac is actually using the above vector // Regression test that isaac is actually using the above vector
let v = range(0, 10).map(|_| ra.next_u32()).collect::<Vec<_>>(); let v = (0..10).map(|_| ra.next_u32()).collect::<Vec<_>>();
assert_eq!(v, assert_eq!(v,
vec!(2558573138, 873787463, 263499565, 2103644246, 3595684709, vec!(2558573138, 873787463, 263499565, 2103644246, 3595684709,
4203127393, 264982119, 2765226902, 2737944514, 3900253796)); 4203127393, 264982119, 2765226902, 2737944514, 3900253796));
@ -581,9 +581,9 @@ mod test {
let seed: &[_] = &[12345, 67890, 54321, 9876]; let seed: &[_] = &[12345, 67890, 54321, 9876];
let mut rb: IsaacRng = SeedableRng::from_seed(seed); let mut rb: IsaacRng = SeedableRng::from_seed(seed);
// skip forward to the 10000th number // skip forward to the 10000th number
for _ in range(0u, 10000) { rb.next_u32(); } for _ in 0u..10000 { rb.next_u32(); }
let v = range(0, 10).map(|_| rb.next_u32()).collect::<Vec<_>>(); let v = (0..10).map(|_| rb.next_u32()).collect::<Vec<_>>();
assert_eq!(v, assert_eq!(v,
vec!(3676831399, 3183332890, 2834741178, 3854698763, 2717568474, vec!(3676831399, 3183332890, 2834741178, 3854698763, 2717568474,
1576568959, 3507990155, 179069555, 141456972, 2478885421)); 1576568959, 3507990155, 179069555, 141456972, 2478885421));
@ -593,7 +593,7 @@ mod test {
let seed: &[_] = &[1, 23, 456, 7890, 12345]; let seed: &[_] = &[1, 23, 456, 7890, 12345];
let mut ra: Isaac64Rng = SeedableRng::from_seed(seed); let mut ra: Isaac64Rng = SeedableRng::from_seed(seed);
// Regression test that isaac is actually using the above vector // Regression test that isaac is actually using the above vector
let v = range(0, 10).map(|_| ra.next_u64()).collect::<Vec<_>>(); let v = (0..10).map(|_| ra.next_u64()).collect::<Vec<_>>();
assert_eq!(v, assert_eq!(v,
vec!(547121783600835980, 14377643087320773276, 17351601304698403469, vec!(547121783600835980, 14377643087320773276, 17351601304698403469,
1238879483818134882, 11952566807690396487, 13970131091560099343, 1238879483818134882, 11952566807690396487, 13970131091560099343,
@ -603,9 +603,9 @@ mod test {
let seed: &[_] = &[12345, 67890, 54321, 9876]; let seed: &[_] = &[12345, 67890, 54321, 9876];
let mut rb: Isaac64Rng = SeedableRng::from_seed(seed); let mut rb: Isaac64Rng = SeedableRng::from_seed(seed);
// skip forward to the 10000th number // skip forward to the 10000th number
for _ in range(0u, 10000) { rb.next_u64(); } for _ in 0u..10000 { rb.next_u64(); }
let v = range(0, 10).map(|_| rb.next_u64()).collect::<Vec<_>>(); let v = (0..10).map(|_| rb.next_u64()).collect::<Vec<_>>();
assert_eq!(v, assert_eq!(v,
vec!(18143823860592706164, 8491801882678285927, 2699425367717515619, vec!(18143823860592706164, 8491801882678285927, 2699425367717515619,
17196852593171130876, 2606123525235546165, 15790932315217671084, 17196852593171130876, 2606123525235546165, 15790932315217671084,
@ -618,7 +618,7 @@ mod test {
let seed: &[_] = &[1, 23, 456, 7890, 12345]; let seed: &[_] = &[1, 23, 456, 7890, 12345];
let mut rng: Isaac64Rng = SeedableRng::from_seed(seed); let mut rng: Isaac64Rng = SeedableRng::from_seed(seed);
let mut clone = rng.clone(); let mut clone = rng.clone();
for _ in range(0u, 16) { for _ in 0u..16 {
assert_eq!(rng.next_u64(), clone.next_u64()); assert_eq!(rng.next_u64(), clone.next_u64());
} }
} }
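
The ISAAC hunks also show how a strided walk reads with the new syntax: iterate block indices and scale them, as in `(0..RAND_SIZE_64 / 8).map(|i| i * 8)`. A minimal standalone sketch of that idiom (the stride 8 and length 32 are illustrative):

```rust
fn main() {
    let n = 32;
    // Visit 0, 8, 16, 24 by iterating block indices and scaling by the stride.
    let starts: Vec<usize> = (0..n / 8).map(|i| i * 8).collect();
    assert_eq!(starts, vec![0, 8, 16, 24]);
}
```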

View file

@ -241,7 +241,7 @@ mod tests {
// this is unlikely to catch an incorrect implementation that // this is unlikely to catch an incorrect implementation that
// generates exactly 0 or 1, but it keeps it sane. // generates exactly 0 or 1, but it keeps it sane.
let mut rng = thread_rng(); let mut rng = thread_rng();
for _ in range(0u, 1_000) { for _ in 0u..1_000 {
// strict inequalities // strict inequalities
let Open01(f) = rng.gen::<Open01<f64>>(); let Open01(f) = rng.gen::<Open01<f64>>();
assert!(0.0 < f && f < 1.0); assert!(0.0 < f && f < 1.0);
@ -254,7 +254,7 @@ mod tests {
#[test] #[test]
fn rand_closed() { fn rand_closed() {
let mut rng = thread_rng(); let mut rng = thread_rng();
for _ in range(0u, 1_000) { for _ in 0u..1_000 {
// strict inequalities // strict inequalities
let Closed01(f) = rng.gen::<Closed01<f64>>(); let Closed01(f) = rng.gen::<Closed01<f64>>();
assert!(0.0 <= f && f <= 1.0); assert!(0.0 <= f && f <= 1.0);

View file

@ -187,7 +187,7 @@ mod test {
let mut rs = ReseedingRng::new(Counter {i:0}, 400, ReseedWithDefault); let mut rs = ReseedingRng::new(Counter {i:0}, 400, ReseedWithDefault);
let mut i = 0; let mut i = 0;
for _ in range(0u, 1000) { for _ in 0u..1000 {
assert_eq!(rs.next_u32(), i % 100); assert_eq!(rs.next_u32(), i % 100);
i += 1; i += 1;
} }

View file

@ -189,7 +189,7 @@ mod tests {
b.bytes = (times * len) as u64; b.bytes = (times * len) as u64;
b.iter(|| { b.iter(|| {
let mut wr = SeekableMemWriter::new(); let mut wr = SeekableMemWriter::new();
for _ in range(0, times) { for _ in 0..times {
wr.write(src.as_slice()).unwrap(); wr.write(src.as_slice()).unwrap();
} }

View file

@ -77,7 +77,7 @@ pub struct TaggedDoc<'a> {
pub doc: Doc<'a>, pub doc: Doc<'a>,
} }
#[derive(Copy, Show)] #[derive(Copy, Debug)]
pub enum EbmlEncoderTag { pub enum EbmlEncoderTag {
EsUint, // 0 EsUint, // 0
EsU64, // 1 EsU64, // 1
@ -111,7 +111,7 @@ pub enum EbmlEncoderTag {
EsLabel, // Used only when debugging EsLabel, // Used only when debugging
} }
#[derive(Show)] #[derive(Debug)]
pub enum Error { pub enum Error {
IntTooBig(uint), IntTooBig(uint),
Expected(String), Expected(String),
@ -1184,7 +1184,7 @@ mod bench {
#[bench] #[bench]
pub fn vuint_at_A_aligned(b: &mut Bencher) { pub fn vuint_at_A_aligned(b: &mut Bencher) {
let data = range(0, 4*100).map(|i| { let data = (0i32..4*100).map(|i| {
match i % 2 { match i % 2 {
0 => 0x80u8, 0 => 0x80u8,
_ => i as u8, _ => i as u8,
@ -1202,7 +1202,7 @@ mod bench {
#[bench] #[bench]
pub fn vuint_at_A_unaligned(b: &mut Bencher) { pub fn vuint_at_A_unaligned(b: &mut Bencher) {
let data = range(0, 4*100+1).map(|i| { let data = (0i32..4*100+1).map(|i| {
match i % 2 { match i % 2 {
1 => 0x80u8, 1 => 0x80u8,
_ => i as u8 _ => i as u8
@ -1220,7 +1220,7 @@ mod bench {
#[bench] #[bench]
pub fn vuint_at_D_aligned(b: &mut Bencher) { pub fn vuint_at_D_aligned(b: &mut Bencher) {
let data = range(0, 4*100).map(|i| { let data = (0i32..4*100).map(|i| {
match i % 4 { match i % 4 {
0 => 0x10u8, 0 => 0x10u8,
3 => i as u8, 3 => i as u8,
@ -1239,7 +1239,7 @@ mod bench {
#[bench] #[bench]
pub fn vuint_at_D_unaligned(b: &mut Bencher) { pub fn vuint_at_D_unaligned(b: &mut Bencher) {
let data = range(0, 4*100+1).map(|i| { let data = (0i32..4*100+1).map(|i| {
match i % 4 { match i % 4 {
1 => 0x10u8, 1 => 0x10u8,
0 => i as u8, 0 => i as u8,
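
Unlike most hunks in this section, the `vuint_at` benches gain an explicit `0i32` suffix alongside the new syntax. A standalone sketch of the same shape (400 simply spells out the `4*100` above; the data is alternating `0x80` and a truncated index byte):

```rust
fn main() {
    // The suffix pins the element type of the range literal explicitly.
    let data: Vec<u8> = (0i32..400).map(|i| match i % 2 {
        0 => 0x80u8,
        _ => i as u8,
    }).collect();
    assert_eq!(data.len(), 400);
}
```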

View file

@ -540,7 +540,7 @@ impl<'a, 'tcx> Context<'a, 'tcx> {
run_lints!(self, exit_lint_attrs, attrs); run_lints!(self, exit_lint_attrs, attrs);
// rollback // rollback
for _ in range(0, pushed) { for _ in 0..pushed {
let (lint, lvlsrc) = self.level_stack.pop().unwrap(); let (lint, lvlsrc) = self.level_stack.pop().unwrap();
self.lints.set_level(lint, lvlsrc); self.lints.set_level(lint, lvlsrc);
} }

View file

@ -40,7 +40,7 @@ use syntax::ast;
pub use lint::context::{Context, LintStore, raw_emit_lint, check_crate, gather_attrs}; pub use lint::context::{Context, LintStore, raw_emit_lint, check_crate, gather_attrs};
/// Specification of a single lint. /// Specification of a single lint.
#[derive(Copy, Show)] #[derive(Copy, Debug)]
pub struct Lint { pub struct Lint {
/// A string identifier for the lint. /// A string identifier for the lint.
/// ///
@ -207,7 +207,7 @@ impl LintId {
} }
/// Setting for how to handle a lint. /// Setting for how to handle a lint.
#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Show)] #[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug)]
pub enum Level { pub enum Level {
Allow, Warn, Deny, Forbid Allow, Warn, Deny, Forbid
} }

View file

@ -219,7 +219,7 @@ pub const tag_items_data_item_stability: uint = 0x92;
pub const tag_items_data_item_repr: uint = 0x93; pub const tag_items_data_item_repr: uint = 0x93;
#[derive(Clone, Show)] #[derive(Clone, Debug)]
pub struct LinkMeta { pub struct LinkMeta {
pub crate_name: String, pub crate_name: String,
pub crate_hash: Svh, pub crate_hash: Svh,

View file

@ -49,7 +49,7 @@ pub struct crate_metadata {
pub span: Span, pub span: Span,
} }
#[derive(Copy, Show, PartialEq, Clone)] #[derive(Copy, Debug, PartialEq, Clone)]
pub enum LinkagePreference { pub enum LinkagePreference {
RequireDynamic, RequireDynamic,
RequireStatic, RequireStatic,

View file

@ -493,7 +493,7 @@ pub fn get_symbol(data: &[u8], id: ast::NodeId) -> String {
} }
// Something that a name can resolve to. // Something that a name can resolve to.
#[derive(Copy, Clone, Show)] #[derive(Copy, Clone, Debug)]
pub enum DefLike { pub enum DefLike {
DlDef(def::Def), DlDef(def::Def),
DlImpl(ast::DefId), DlImpl(ast::DefId),

View file

@ -1598,7 +1598,7 @@ fn encode_index<T, F>(rbml_w: &mut Encoder, index: Vec<entry<T>>, mut write_fn:
F: FnMut(&mut SeekableMemWriter, &T), F: FnMut(&mut SeekableMemWriter, &T),
T: Hash<SipHasher>, T: Hash<SipHasher>,
{ {
let mut buckets: Vec<Vec<entry<T>>> = range(0, 256u16).map(|_| Vec::new()).collect(); let mut buckets: Vec<Vec<entry<T>>> = (0..256u16).map(|_| Vec::new()).collect();
for elt in index.into_iter() { for elt in index.into_iter() {
let mut s = SipHasher::new(); let mut s = SipHasher::new();
elt.val.hash(&mut s); elt.val.hash(&mut s);

View file

@ -43,7 +43,7 @@ use syntax::parse::token;
// def-id will depend on where it originated from. Therefore, the conversion // def-id will depend on where it originated from. Therefore, the conversion
// function is given an indicator of the source of the def-id. See // function is given an indicator of the source of the def-id. See
// astencode.rs for more information. // astencode.rs for more information.
#[derive(Copy, Show)] #[derive(Copy, Debug)]
pub enum DefIdSource { pub enum DefIdSource {
// Identifies a struct, trait, enum, etc. // Identifies a struct, trait, enum, etc.
NominalType, NominalType,
@ -132,7 +132,7 @@ pub fn parse_state_from_data<'a, 'tcx>(data: &'a [u8], crate_num: ast::CrateNum,
fn data_log_string(data: &[u8], pos: uint) -> String { fn data_log_string(data: &[u8], pos: uint) -> String {
let mut buf = String::new(); let mut buf = String::new();
buf.push_str("<<"); buf.push_str("<<");
for i in range(pos, data.len()) { for i in pos..data.len() {
let c = data[i]; let c = data[i];
if c > 0x20 && c <= 0x7F { if c > 0x20 && c <= 0x7F {
buf.push(c as char); buf.push(c as char);

View file

@ -1293,7 +1293,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
} }
ty::AdjustDerefRef(ref adj) => { ty::AdjustDerefRef(ref adj) => {
assert!(!ty::adjust_is_object(adjustment)); assert!(!ty::adjust_is_object(adjustment));
for autoderef in range(0, adj.autoderefs) { for autoderef in 0..adj.autoderefs {
let method_call = MethodCall::autoderef(id, autoderef); let method_call = MethodCall::autoderef(id, autoderef);
for &method in tcx.method_map.borrow().get(&method_call).iter() { for &method in tcx.method_map.borrow().get(&method_call).iter() {
rbml_w.tag(c::tag_table_method_map, |rbml_w| { rbml_w.tag(c::tag_table_method_map, |rbml_w| {
@ -1529,7 +1529,7 @@ impl<'a, 'tcx> rbml_decoder_decoder_helpers<'tcx> for reader::Decoder<'a> {
fn type_string(doc: rbml::Doc) -> String { fn type_string(doc: rbml::Doc) -> String {
let mut str = String::new(); let mut str = String::new();
for i in range(doc.start, doc.end) { for i in doc.start..doc.end {
str.push(doc.data[i] as char); str.push(doc.data[i] as char);
} }
str str

View file

@ -70,7 +70,7 @@ impl<'a> fmt::Debug for Matrix<'a> {
let column_count = m.iter().map(|row| row.len()).max().unwrap_or(0u); let column_count = m.iter().map(|row| row.len()).max().unwrap_or(0u);
assert!(m.iter().all(|row| row.len() == column_count)); assert!(m.iter().all(|row| row.len() == column_count));
let column_widths: Vec<uint> = range(0, column_count).map(|col| { let column_widths: Vec<uint> = (0..column_count).map(|col| {
pretty_printed_matrix.iter().map(|row| row[col].len()).max().unwrap_or(0u) pretty_printed_matrix.iter().map(|row| row[col].len()).max().unwrap_or(0u)
}).collect(); }).collect();
@ -609,7 +609,7 @@ fn is_useful(cx: &MatchCheckCtxt,
let arity = constructor_arity(cx, &c, left_ty); let arity = constructor_arity(cx, &c, left_ty);
let mut result = { let mut result = {
let pat_slice = &pats[]; let pat_slice = &pats[];
let subpats: Vec<_> = range(0, arity).map(|i| { let subpats: Vec<_> = (0..arity).map(|i| {
pat_slice.get(i).map_or(DUMMY_WILD_PAT, |p| &**p) pat_slice.get(i).map_or(DUMMY_WILD_PAT, |p| &**p)
}).collect(); }).collect();
vec![construct_witness(cx, &c, subpats, left_ty)] vec![construct_witness(cx, &c, subpats, left_ty)]

View file

@ -28,7 +28,7 @@ use syntax::visit;
use syntax::print::{pp, pprust}; use syntax::print::{pp, pprust};
use util::nodemap::NodeMap; use util::nodemap::NodeMap;
#[derive(Copy, Show)] #[derive(Copy, Debug)]
pub enum EntryOrExit { pub enum EntryOrExit {
Entry, Entry,
Exit, Exit,
@ -352,7 +352,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> {
for (word_index, &word) in words.iter().enumerate() { for (word_index, &word) in words.iter().enumerate() {
if word != 0 { if word != 0 {
let base_index = word_index * uint::BITS; let base_index = word_index * uint::BITS;
for offset in range(0u, uint::BITS) { for offset in 0u..uint::BITS {
let bit = 1 << offset; let bit = 1 << offset;
if (word & bit) != 0 { if (word & bit) != 0 {
// NB: we round up the total number of bits // NB: we round up the total number of bits
@ -552,7 +552,7 @@ fn bits_to_string(words: &[uint]) -> String {
for &word in words.iter() { for &word in words.iter() {
let mut v = word; let mut v = word;
for _ in range(0u, uint::BYTES) { for _ in 0u..uint::BYTES {
result.push(sep); result.push(sep);
result.push_str(&format!("{:02x}", v & 0xFF)[]); result.push_str(&format!("{:02x}", v & 0xFF)[]);
v >>= 8; v >>= 8;

View file

@ -20,7 +20,7 @@ use syntax::ast_util::local_def;
use std::cell::RefCell; use std::cell::RefCell;
#[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] #[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Def { pub enum Def {
DefFn(ast::DefId, bool /* is_ctor */), DefFn(ast::DefId, bool /* is_ctor */),
DefStaticMethod(/* method */ ast::DefId, MethodProvenance), DefStaticMethod(/* method */ ast::DefId, MethodProvenance),
@ -72,13 +72,13 @@ pub struct Export {
pub def_id: ast::DefId, // The definition of the target. pub def_id: ast::DefId, // The definition of the target.
} }
#[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] #[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum MethodProvenance { pub enum MethodProvenance {
FromTrait(ast::DefId), FromTrait(ast::DefId),
FromImpl(ast::DefId), FromImpl(ast::DefId),
} }
#[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] #[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum TyParamProvenance { pub enum TyParamProvenance {
FromSelf(ast::DefId), FromSelf(ast::DefId),
FromParam(ast::DefId), FromParam(ast::DefId),

View file

@ -157,7 +157,7 @@ fn calculate_type(sess: &session::Session,
}); });
// Collect what we've got so far in the return vector. // Collect what we've got so far in the return vector.
let mut ret = range(1, sess.cstore.next_crate_num()).map(|i| { let mut ret = (1..sess.cstore.next_crate_num()).map(|i| {
match formats.get(&i).map(|v| *v) { match formats.get(&i).map(|v| *v) {
v @ Some(cstore::RequireDynamic) => v, v @ Some(cstore::RequireDynamic) => v,
_ => None, _ => None,

View file

@ -95,7 +95,7 @@ pub trait Delegate<'tcx> {
mode: MutateMode); mode: MutateMode);
} }
#[derive(Copy, PartialEq, Show)] #[derive(Copy, PartialEq, Debug)]
pub enum LoanCause { pub enum LoanCause {
ClosureCapture(Span), ClosureCapture(Span),
AddrOf, AddrOf,
@ -107,20 +107,20 @@ pub enum LoanCause {
MatchDiscriminant MatchDiscriminant
} }
#[derive(Copy, PartialEq, Show)] #[derive(Copy, PartialEq, Debug)]
pub enum ConsumeMode { pub enum ConsumeMode {
Copy, // reference to x where x has a type that copies Copy, // reference to x where x has a type that copies
Move(MoveReason), // reference to x where x has a type that moves Move(MoveReason), // reference to x where x has a type that moves
} }
#[derive(Copy, PartialEq, Show)] #[derive(Copy, PartialEq, Debug)]
pub enum MoveReason { pub enum MoveReason {
DirectRefMove, DirectRefMove,
PatBindingMove, PatBindingMove,
CaptureMove, CaptureMove,
} }
#[derive(Copy, PartialEq, Show)] #[derive(Copy, PartialEq, Debug)]
pub enum MatchMode { pub enum MatchMode {
NonBindingMatch, NonBindingMatch,
BorrowingMatch, BorrowingMatch,
@ -128,7 +128,7 @@ pub enum MatchMode {
MovingMatch, MovingMatch,
} }
#[derive(PartialEq,Show)] #[derive(PartialEq,Debug)]
enum TrackMatchMode<T> { enum TrackMatchMode<T> {
Unknown, Unknown,
Definite(MatchMode), Definite(MatchMode),
@ -197,7 +197,7 @@ impl<T> TrackMatchMode<T> {
} }
} }
#[derive(Copy, PartialEq, Show)] #[derive(Copy, PartialEq, Debug)]
pub enum MutateMode { pub enum MutateMode {
Init, Init,
JustWrite, // x = y JustWrite, // x = y
@ -842,7 +842,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> {
autoderefs: uint) { autoderefs: uint) {
debug!("walk_autoderefs expr={} autoderefs={}", expr.repr(self.tcx()), autoderefs); debug!("walk_autoderefs expr={} autoderefs={}", expr.repr(self.tcx()), autoderefs);
for i in range(0, autoderefs) { for i in 0..autoderefs {
let deref_id = ty::MethodCall::autoderef(expr.id, i); let deref_id = ty::MethodCall::autoderef(expr.id, i);
match self.typer.node_method_ty(deref_id) { match self.typer.node_method_ty(deref_id) {
None => {} None => {}

View file

@ -61,18 +61,18 @@ impl<E: Debug> Debug for Edge<E> {
} }
} }
#[derive(Clone, Copy, PartialEq, Show)] #[derive(Clone, Copy, PartialEq, Debug)]
pub struct NodeIndex(pub uint); pub struct NodeIndex(pub uint);
#[allow(non_upper_case_globals)] #[allow(non_upper_case_globals)]
pub const InvalidNodeIndex: NodeIndex = NodeIndex(uint::MAX); pub const InvalidNodeIndex: NodeIndex = NodeIndex(uint::MAX);
#[derive(Copy, PartialEq, Show)] #[derive(Copy, PartialEq, Debug)]
pub struct EdgeIndex(pub uint); pub struct EdgeIndex(pub uint);
#[allow(non_upper_case_globals)] #[allow(non_upper_case_globals)]
pub const InvalidEdgeIndex: EdgeIndex = EdgeIndex(uint::MAX); pub const InvalidEdgeIndex: EdgeIndex = EdgeIndex(uint::MAX);
// Use a private field here to guarantee no more instances are created: // Use a private field here to guarantee no more instances are created:
#[derive(Copy, Show)] #[derive(Copy, Debug)]
pub struct Direction { repr: uint } pub struct Direction { repr: uint }
#[allow(non_upper_case_globals)] #[allow(non_upper_case_globals)]
pub const Outgoing: Direction = Direction { repr: 0 }; pub const Outgoing: Direction = Direction { repr: 0 };

View file

@ -176,7 +176,7 @@ pub trait Combine<'tcx> : Sized {
assert_eq!(num_region_params, a_rs.len()); assert_eq!(num_region_params, a_rs.len());
assert_eq!(num_region_params, b_rs.len()); assert_eq!(num_region_params, b_rs.len());
let mut rs = vec!(); let mut rs = vec!();
for i in range(0, num_region_params) { for i in 0..num_region_params {
let a_r = a_rs[i]; let a_r = a_rs[i];
let b_r = b_rs[i]; let b_r = b_rs[i];
let variance = variances[i]; let variance = variances[i];

View file

@ -1229,8 +1229,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> {
let mut insert = Vec::new(); let mut insert = Vec::new();
if lifetimes.len() == 0 { if lifetimes.len() == 0 {
let anon = self.cur_anon.get(); let anon = self.cur_anon.get();
for (i, a) in range(anon, for (i, a) in (anon..anon+expected).enumerate() {
anon+expected).enumerate() {
if anon_nums.contains(&a) { if anon_nums.contains(&a) {
insert.push(i as u32); insert.push(i as u32);
} }
@ -1343,11 +1342,11 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> {
let mut new_lts = Vec::new(); let mut new_lts = Vec::new();
if data.lifetimes.len() == 0 { if data.lifetimes.len() == 0 {
// traverse once to see if there's a need to insert lifetime // traverse once to see if there's a need to insert lifetime
let need_insert = range(0, expected).any(|i| { let need_insert = (0..expected).any(|i| {
indexes.contains(&i) indexes.contains(&i)
}); });
if need_insert { if need_insert {
for i in range(0, expected) { for i in 0..expected {
if indexes.contains(&i) { if indexes.contains(&i) {
new_lts.push(lifetime); new_lts.push(lifetime);
} else { } else {
@ -1767,7 +1766,7 @@ impl LifeGiver {
let mut s = String::new(); let mut s = String::new();
let (n, r) = (counter/26 + 1, counter % 26); let (n, r) = (counter/26 + 1, counter % 26);
let letter: char = from_u32((r+97) as u32).unwrap(); let letter: char = from_u32((r+97) as u32).unwrap();
for _ in range(0, n) { for _ in 0..n {
s.push(letter); s.push(letter);
} }
s s
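For reference, the fresh-lifetime-name scheme in the `LifeGiver` hunk above can be reproduced standalone; `name_for` below is a hypothetical helper written for illustration, not compiler code:

```rust
// Counters 0..26 map to "a".."z", 26..52 to "aa".."zz", and so on:
// the letter is `counter % 26`, repeated `counter / 26 + 1` times.
fn name_for(counter: u32) -> String {
    let (n, r) = (counter / 26 + 1, counter % 26);
    let letter = std::char::from_u32(r + 97).unwrap();
    (0..n).map(|_| letter).collect()
}

fn main() {
    assert_eq!(name_for(0), "a");
    assert_eq!(name_for(25), "z");
    assert_eq!(name_for(26), "aa");
}
```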

View file

@ -95,7 +95,7 @@ pub type SkolemizationMap = FnvHashMap<ty::BoundRegion,ty::Region>;
/// Why did we require that the two types be related? /// Why did we require that the two types be related?
/// ///
/// See `error_reporting.rs` for more details /// See `error_reporting.rs` for more details
#[derive(Clone, Copy, Show)] #[derive(Clone, Copy, Debug)]
pub enum TypeOrigin { pub enum TypeOrigin {
// Not yet categorized in a better way // Not yet categorized in a better way
Misc(Span), Misc(Span),
@ -133,7 +133,7 @@ pub enum TypeOrigin {
} }
/// See `error_reporting.rs` for more details /// See `error_reporting.rs` for more details
#[derive(Clone, Show)] #[derive(Clone, Debug)]
pub enum ValuePairs<'tcx> { pub enum ValuePairs<'tcx> {
Types(ty::expected_found<Ty<'tcx>>), Types(ty::expected_found<Ty<'tcx>>),
TraitRefs(ty::expected_found<Rc<ty::TraitRef<'tcx>>>), TraitRefs(ty::expected_found<Rc<ty::TraitRef<'tcx>>>),
@ -144,7 +144,7 @@ pub enum ValuePairs<'tcx> {
/// encounter an error or subtyping constraint. /// encounter an error or subtyping constraint.
/// ///
/// See `error_reporting.rs` for more details. /// See `error_reporting.rs` for more details.
#[derive(Clone, Show)] #[derive(Clone, Debug)]
pub struct TypeTrace<'tcx> { pub struct TypeTrace<'tcx> {
origin: TypeOrigin, origin: TypeOrigin,
values: ValuePairs<'tcx>, values: ValuePairs<'tcx>,
@ -153,7 +153,7 @@ pub struct TypeTrace<'tcx> {
/// The origin of a `r1 <= r2` constraint. /// The origin of a `r1 <= r2` constraint.
/// ///
/// See `error_reporting.rs` for more details /// See `error_reporting.rs` for more details
#[derive(Clone, Show)] #[derive(Clone, Debug)]
pub enum SubregionOrigin<'tcx> { pub enum SubregionOrigin<'tcx> {
// Arose from a subtyping relation // Arose from a subtyping relation
Subtype(TypeTrace<'tcx>), Subtype(TypeTrace<'tcx>),
@ -222,7 +222,7 @@ pub enum SubregionOrigin<'tcx> {
} }
/// Times when we replace late-bound regions with variables: /// Times when we replace late-bound regions with variables:
#[derive(Clone, Copy, Show)] #[derive(Clone, Copy, Debug)]
pub enum LateBoundRegionConversionTime { pub enum LateBoundRegionConversionTime {
/// when a fn is called /// when a fn is called
FnCall, FnCall,
@ -237,7 +237,7 @@ pub enum LateBoundRegionConversionTime {
/// Reasons to create a region inference variable /// Reasons to create a region inference variable
/// ///
/// See `error_reporting.rs` for more details /// See `error_reporting.rs` for more details
#[derive(Clone, Show)] #[derive(Clone, Debug)]
pub enum RegionVariableOrigin<'tcx> { pub enum RegionVariableOrigin<'tcx> {
// Region variables created for ill-categorized reasons, // Region variables created for ill-categorized reasons,
// mostly indicates places in need of refactoring // mostly indicates places in need of refactoring
@ -270,7 +270,7 @@ pub enum RegionVariableOrigin<'tcx> {
BoundRegionInCoherence(ast::Name), BoundRegionInCoherence(ast::Name),
} }
#[derive(Copy, Show)] #[derive(Copy, Debug)]
pub enum fixup_err { pub enum fixup_err {
unresolved_int_ty(IntVid), unresolved_int_ty(IntVid),
unresolved_float_ty(FloatVid), unresolved_float_ty(FloatVid),
@ -828,7 +828,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
} }
pub fn next_ty_vars(&self, n: uint) -> Vec<Ty<'tcx>> { pub fn next_ty_vars(&self, n: uint) -> Vec<Ty<'tcx>> {
range(0, n).map(|_i| self.next_ty_var()).collect() (0..n).map(|_i| self.next_ty_var()).collect()
} }
pub fn next_int_var_id(&self) -> IntVid { pub fn next_int_var_id(&self) -> IntVid {

View file

@ -120,7 +120,7 @@ struct ConstraintGraph<'a, 'tcx: 'a> {
node_ids: FnvHashMap<Node, uint>, node_ids: FnvHashMap<Node, uint>,
} }
#[derive(Clone, Hash, PartialEq, Eq, Show)] #[derive(Clone, Hash, PartialEq, Eq, Debug)]
enum Node { enum Node {
RegionVid(ty::RegionVid), RegionVid(ty::RegionVid),
Region(ty::Region), Region(ty::Region),

View file

@ -42,7 +42,7 @@ mod doc;
mod graphviz; mod graphviz;
// A constraint that influences the inference process. // A constraint that influences the inference process.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Show)] #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum Constraint { pub enum Constraint {
// One region variable is subregion of another // One region variable is subregion of another
ConstrainVarSubVar(RegionVid, RegionVid), ConstrainVarSubVar(RegionVid, RegionVid),
@ -69,7 +69,7 @@ pub enum Verify<'tcx> {
VerifyGenericBound(GenericKind<'tcx>, SubregionOrigin<'tcx>, Region, Vec<Region>), VerifyGenericBound(GenericKind<'tcx>, SubregionOrigin<'tcx>, Region, Vec<Region>),
} }
#[derive(Clone, Show, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]
pub enum GenericKind<'tcx> { pub enum GenericKind<'tcx> {
Param(ty::ParamTy), Param(ty::ParamTy),
Projection(ty::ProjectionTy<'tcx>), Projection(ty::ProjectionTy<'tcx>),
@ -97,7 +97,7 @@ pub enum CombineMapType {
Lub, Glb Lub, Glb
} }
#[derive(Clone, Show)] #[derive(Clone, Debug)]
pub enum RegionResolutionError<'tcx> { pub enum RegionResolutionError<'tcx> {
/// `ConcreteFailure(o, a, b)`: /// `ConcreteFailure(o, a, b)`:
/// ///
@ -149,7 +149,7 @@ pub enum RegionResolutionError<'tcx> {
/// ``` /// ```
/// would report an error because we expect 'a and 'b to match, and so we group /// would report an error because we expect 'a and 'b to match, and so we group
/// 'a and 'b together inside a SameRegions struct /// 'a and 'b together inside a SameRegions struct
#[derive(Clone, Show)] #[derive(Clone, Debug)]
pub struct SameRegions { pub struct SameRegions {
pub scope_id: ast::NodeId, pub scope_id: ast::NodeId,
pub regions: Vec<BoundRegion> pub regions: Vec<BoundRegion>
@ -223,7 +223,7 @@ pub struct RegionVarBindings<'a, 'tcx: 'a> {
values: RefCell<Option<Vec<VarValue>>>, values: RefCell<Option<Vec<VarValue>>>,
} }
#[derive(Show)] #[derive(Debug)]
#[allow(missing_copy_implementations)] #[allow(missing_copy_implementations)]
pub struct RegionSnapshot { pub struct RegionSnapshot {
length: uint, length: uint,
@ -943,7 +943,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
// ______________________________________________________________________ // ______________________________________________________________________
#[derive(Copy, PartialEq, Show)] #[derive(Copy, PartialEq, Debug)]
enum Classification { Expanding, Contracting } enum Classification { Expanding, Contracting }
#[derive(Copy)] #[derive(Copy)]
@ -983,7 +983,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
} }
fn construct_var_data(&self) -> Vec<VarData> { fn construct_var_data(&self) -> Vec<VarData> {
range(0, self.num_vars() as uint).map(|_| { (0..self.num_vars() as uint).map(|_| {
VarData { VarData {
// All nodes are initially classified as contracting; during // All nodes are initially classified as contracting; during
// the expansion phase, we will shift the classification for // the expansion phase, we will shift the classification for
@ -1259,7 +1259,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
let mut opt_graph = None; let mut opt_graph = None;
for idx in range(0u, self.num_vars() as uint) { for idx in 0u..self.num_vars() as uint {
match var_data[idx].value { match var_data[idx].value {
Value(_) => { Value(_) => {
/* Inference successful */ /* Inference successful */
@ -1316,7 +1316,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
} }
} }
range(0, self.num_vars() as uint).map(|idx| var_data[idx].value).collect() (0..self.num_vars() as uint).map(|idx| var_data[idx].value).collect()
} }
fn construct_graph(&self) -> RegionGraph { fn construct_graph(&self) -> RegionGraph {
@ -1328,7 +1328,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
let mut graph = graph::Graph::with_capacity(num_vars as uint + 1, let mut graph = graph::Graph::with_capacity(num_vars as uint + 1,
num_edges); num_edges);
for _ in range(0, num_vars) { for _ in 0..num_vars {
graph.add_node(()); graph.add_node(());
} }
let dummy_idx = graph.add_node(()); let dummy_idx = graph.add_node(());

View file

@ -46,7 +46,7 @@ struct Delegate<'tcx>;
type Relation = (RelationDir, ty::TyVid); type Relation = (RelationDir, ty::TyVid);
#[derive(Copy, PartialEq, Show)] #[derive(Copy, PartialEq, Debug)]
pub enum RelationDir { pub enum RelationDir {
SubtypeOf, SupertypeOf, EqTo SubtypeOf, SupertypeOf, EqTo
} }

View file

@ -63,7 +63,7 @@ pub trait UnifyValue : Clone + PartialEq + Debug {
/// to keep the DAG relatively balanced, which helps keep the running /// to keep the DAG relatively balanced, which helps keep the running
/// time of the algorithm under control. For more information, see /// time of the algorithm under control. For more information, see
/// <http://en.wikipedia.org/wiki/Disjoint-set_data_structure>. /// <http://en.wikipedia.org/wiki/Disjoint-set_data_structure>.
#[derive(PartialEq,Clone,Show)] #[derive(PartialEq,Clone,Debug)]
pub enum VarValue<K:UnifyKey> { pub enum VarValue<K:UnifyKey> {
Redirect(K), Redirect(K),
Root(K::Value, uint), Root(K::Value, uint),
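The doc comment in this hunk refers to the standard disjoint-set balancing trick. As a rough illustration only, here is a generic union-by-rank sketch with path compression; it is not rustc's `UnifyKey`/`UnifyValue` machinery, just the textbook structure the comment alludes to:

```rust
struct UnionFind {
    parent: Vec<usize>,
    rank: Vec<usize>,
}

impl UnionFind {
    fn new(n: usize) -> UnionFind {
        UnionFind { parent: (0..n).collect(), rank: vec![0; n] }
    }

    // Find the root of `x`, flattening the path along the way.
    fn find(&mut self, x: usize) -> usize {
        let p = self.parent[x];
        if p != x {
            let root = self.find(p);
            self.parent[x] = root;
        }
        self.parent[x]
    }

    // Attach the shallower tree under the deeper one so lookups stay cheap.
    fn union(&mut self, a: usize, b: usize) {
        let (ra, rb) = (self.find(a), self.find(b));
        if ra == rb { return; }
        if self.rank[ra] < self.rank[rb] {
            self.parent[ra] = rb;
        } else {
            self.parent[rb] = ra;
            if self.rank[ra] == self.rank[rb] { self.rank[ra] += 1; }
        }
    }
}

fn main() {
    let mut uf = UnionFind::new(4);
    uf.union(0, 1);
    uf.union(2, 3);
    assert!(uf.find(0) == uf.find(1));
    assert!(uf.find(0) != uf.find(2));
}
```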

View file

@ -159,7 +159,7 @@ impl Clone for LiveNode {
} }
} }
#[derive(Copy, PartialEq, Show)] #[derive(Copy, PartialEq, Debug)]
enum LiveNodeKind { enum LiveNodeKind {
FreeVarNode(Span), FreeVarNode(Span),
ExprNode(Span), ExprNode(Span),
@ -245,13 +245,13 @@ struct CaptureInfo {
var_nid: NodeId var_nid: NodeId
} }
#[derive(Copy, Show)] #[derive(Copy, Debug)]
struct LocalInfo { struct LocalInfo {
id: NodeId, id: NodeId,
ident: ast::Ident ident: ast::Ident
} }
#[derive(Copy, Show)] #[derive(Copy, Debug)]
enum VarKind { enum VarKind {
Arg(NodeId, ast::Ident), Arg(NodeId, ast::Ident),
Local(LocalInfo), Local(LocalInfo),
@ -687,7 +687,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
{ {
let node_base_idx = self.idx(ln, Variable(0u)); let node_base_idx = self.idx(ln, Variable(0u));
let succ_base_idx = self.idx(succ_ln, Variable(0u)); let succ_base_idx = self.idx(succ_ln, Variable(0u));
for var_idx in range(0u, self.ir.num_vars) { for var_idx in 0u..self.ir.num_vars {
op(self, node_base_idx + var_idx, succ_base_idx + var_idx); op(self, node_base_idx + var_idx, succ_base_idx + var_idx);
} }
} }
@ -700,7 +700,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
F: FnMut(uint) -> LiveNode, F: FnMut(uint) -> LiveNode,
{ {
let node_base_idx = self.idx(ln, Variable(0)); let node_base_idx = self.idx(ln, Variable(0));
for var_idx in range(0u, self.ir.num_vars) { for var_idx in 0u..self.ir.num_vars {
let idx = node_base_idx + var_idx; let idx = node_base_idx + var_idx;
if test(idx).is_valid() { if test(idx).is_valid() {
try!(write!(wr, " {:?}", Variable(var_idx))); try!(write!(wr, " {:?}", Variable(var_idx)));
@ -860,7 +860,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
// hack to skip the loop unless debug! is enabled: // hack to skip the loop unless debug! is enabled:
debug!("^^ liveness computation results for body {} (entry={:?})", debug!("^^ liveness computation results for body {} (entry={:?})",
{ {
for ln_idx in range(0u, self.ir.num_live_nodes) { for ln_idx in 0u..self.ir.num_live_nodes {
debug!("{:?}", self.ln_str(LiveNode(ln_idx))); debug!("{:?}", self.ln_str(LiveNode(ln_idx)));
} }
body.id body.id

View file

@ -87,7 +87,7 @@ use syntax::parse::token;
use std::cell::RefCell; use std::cell::RefCell;
use std::rc::Rc; use std::rc::Rc;
#[derive(Clone, PartialEq, Show)] #[derive(Clone, PartialEq, Debug)]
pub enum categorization<'tcx> { pub enum categorization<'tcx> {
cat_rvalue(ty::Region), // temporary val, argument is its scope cat_rvalue(ty::Region), // temporary val, argument is its scope
cat_static_item, cat_static_item,
@ -101,14 +101,14 @@ pub enum categorization<'tcx> {
} }
// Represents any kind of upvar // Represents any kind of upvar
#[derive(Clone, Copy, PartialEq, Show)] #[derive(Clone, Copy, PartialEq, Debug)]
pub struct Upvar { pub struct Upvar {
pub id: ty::UpvarId, pub id: ty::UpvarId,
pub kind: ty::ClosureKind pub kind: ty::ClosureKind
} }
// different kinds of pointers: // different kinds of pointers:
#[derive(Clone, Copy, PartialEq, Eq, Hash, Show)] #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum PointerKind { pub enum PointerKind {
/// `Box<T>` /// `Box<T>`
Unique, Unique,
@ -125,25 +125,25 @@ pub enum PointerKind {
// We use the term "interior" to mean "something reachable from the // We use the term "interior" to mean "something reachable from the
// base without a pointer dereference", e.g. a field // base without a pointer dereference", e.g. a field
#[derive(Clone, Copy, PartialEq, Eq, Hash, Show)] #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum InteriorKind { pub enum InteriorKind {
InteriorField(FieldName), InteriorField(FieldName),
InteriorElement(ElementKind), InteriorElement(ElementKind),
} }
#[derive(Clone, Copy, PartialEq, Eq, Hash, Show)] #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum FieldName { pub enum FieldName {
NamedField(ast::Name), NamedField(ast::Name),
PositionalField(uint) PositionalField(uint)
} }
#[derive(Clone, Copy, PartialEq, Eq, Hash, Show)] #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum ElementKind { pub enum ElementKind {
VecElement, VecElement,
OtherElement, OtherElement,
} }
#[derive(Clone, Copy, PartialEq, Eq, Hash, Show)] #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum MutabilityCategory { pub enum MutabilityCategory {
McImmutable, // Immutable. McImmutable, // Immutable.
McDeclared, // Directly declared as mutable. McDeclared, // Directly declared as mutable.
@ -155,7 +155,7 @@ pub enum MutabilityCategory {
// Upvar categorization can generate a variable number of nested // Upvar categorization can generate a variable number of nested
// derefs. The note allows detecting them without deep pattern // derefs. The note allows detecting them without deep pattern
// matching on the categorization. // matching on the categorization.
#[derive(Clone, Copy, PartialEq, Show)] #[derive(Clone, Copy, PartialEq, Debug)]
pub enum Note { pub enum Note {
NoteClosureEnv(ty::UpvarId), // Deref through closure env NoteClosureEnv(ty::UpvarId), // Deref through closure env
NoteUpvarRef(ty::UpvarId), // Deref through by-ref upvar NoteUpvarRef(ty::UpvarId), // Deref through by-ref upvar
@ -176,7 +176,7 @@ pub enum Note {
// dereference, but its type is the type *before* the dereference // dereference, but its type is the type *before* the dereference
// (`@T`). So use `cmt.ty` to find the type of the value in a consistent // (`@T`). So use `cmt.ty` to find the type of the value in a consistent
// fashion. For more details, see the method `cat_pattern` // fashion. For more details, see the method `cat_pattern`
#[derive(Clone, PartialEq, Show)] #[derive(Clone, PartialEq, Debug)]
pub struct cmt_<'tcx> { pub struct cmt_<'tcx> {
pub id: ast::NodeId, // id of expr/pat producing this value pub id: ast::NodeId, // id of expr/pat producing this value
pub span: Span, // span of same expr/pat pub span: Span, // span of same expr/pat
@ -456,7 +456,7 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> {
debug!("cat_expr_autoderefd: autoderefs={}, cmt={}", debug!("cat_expr_autoderefd: autoderefs={}, cmt={}",
autoderefs, autoderefs,
cmt.repr(self.tcx())); cmt.repr(self.tcx()));
for deref in range(1u, autoderefs + 1) { for deref in 1u..autoderefs + 1 {
cmt = try!(self.cat_deref(expr, cmt, deref)); cmt = try!(self.cat_deref(expr, cmt, deref));
} }
return Ok(cmt); return Ok(cmt);

View file

@ -35,7 +35,7 @@ pub type PublicItems = NodeSet;
// FIXME: dox // FIXME: dox
pub type LastPrivateMap = NodeMap<LastPrivate>; pub type LastPrivateMap = NodeMap<LastPrivate>;
#[derive(Copy, Show)] #[derive(Copy, Debug)]
pub enum LastPrivate { pub enum LastPrivate {
LastMod(PrivateDep), LastMod(PrivateDep),
// `use` directives (imports) can refer to two separate definitions in the // `use` directives (imports) can refer to two separate definitions in the
@ -49,14 +49,14 @@ pub enum LastPrivate {
type_used: ImportUse}, type_used: ImportUse},
} }
#[derive(Copy, Show)] #[derive(Copy, Debug)]
pub enum PrivateDep { pub enum PrivateDep {
AllPublic, AllPublic,
DependsOn(ast::DefId), DependsOn(ast::DefId),
} }
// How an import is used. // How an import is used.
#[derive(Copy, PartialEq, Show)] #[derive(Copy, PartialEq, Debug)]
pub enum ImportUse { pub enum ImportUse {
Unused, // The import is not used. Unused, // The import is not used.
Used, // The import is used. Used, // The import is used.

View file

@ -37,7 +37,7 @@ use syntax::visit::{Visitor, FnKind};
/// actually attach a more meaningful ordering to scopes than the one /// actually attach a more meaningful ordering to scopes than the one
/// generated via deriving here. /// generated via deriving here.
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, RustcEncodable, #[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, RustcEncodable,
RustcDecodable, Show, Copy)] RustcDecodable, Debug, Copy)]
pub enum CodeExtent { pub enum CodeExtent {
Misc(ast::NodeId), Misc(ast::NodeId),
Remainder(BlockRemainder), Remainder(BlockRemainder),
@ -61,7 +61,7 @@ pub enum CodeExtent {
/// * the subscope with `first_statement_index == 1` is scope of `c`, /// * the subscope with `first_statement_index == 1` is scope of `c`,
/// and thus does not include EXPR_2, but covers the `...`. /// and thus does not include EXPR_2, but covers the `...`.
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, RustcEncodable, #[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, RustcEncodable,
RustcDecodable, Show, Copy)] RustcDecodable, Debug, Copy)]
pub struct BlockRemainder { pub struct BlockRemainder {
pub block: ast::NodeId, pub block: ast::NodeId,
pub first_statement_index: uint, pub first_statement_index: uint,
@ -179,7 +179,7 @@ pub struct RegionMaps {
/// Carries the node id for the innermost block or match expression, /// Carries the node id for the innermost block or match expression,
/// for building up the `var_map` which maps ids to the blocks in /// for building up the `var_map` which maps ids to the blocks in
/// which they were declared. /// which they were declared.
#[derive(PartialEq, Eq, Show, Copy)] #[derive(PartialEq, Eq, Debug, Copy)]
enum InnermostDeclaringBlock { enum InnermostDeclaringBlock {
None, None,
Block(ast::NodeId), Block(ast::NodeId),
@ -204,7 +204,7 @@ impl InnermostDeclaringBlock {
/// Contextual information for declarations introduced by a statement /// Contextual information for declarations introduced by a statement
/// (i.e. `let`). It carries node-id's for statement and enclosing /// (i.e. `let`). It carries node-id's for statement and enclosing
/// block both, as well as the statement's index within the block. /// block both, as well as the statement's index within the block.
#[derive(PartialEq, Eq, Show, Copy)] #[derive(PartialEq, Eq, Debug, Copy)]
struct DeclaringStatementContext { struct DeclaringStatementContext {
stmt_id: ast::NodeId, stmt_id: ast::NodeId,
block_id: ast::NodeId, block_id: ast::NodeId,
@ -220,7 +220,7 @@ impl DeclaringStatementContext {
} }
} }
#[derive(PartialEq, Eq, Show, Copy)] #[derive(PartialEq, Eq, Debug, Copy)]
enum InnermostEnclosingExpr { enum InnermostEnclosingExpr {
None, None,
Some(ast::NodeId), Some(ast::NodeId),
@ -242,7 +242,7 @@ impl InnermostEnclosingExpr {
} }
} }
#[derive(Show, Copy)] #[derive(Debug, Copy)]
pub struct Context { pub struct Context {
var_parent: InnermostDeclaringBlock, var_parent: InnermostDeclaringBlock,

View file

@ -33,7 +33,7 @@ use syntax::visit;
use syntax::visit::Visitor; use syntax::visit::Visitor;
use util::nodemap::NodeMap; use util::nodemap::NodeMap;
#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Show)] #[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug)]
pub enum DefRegion { pub enum DefRegion {
DefStaticRegion, DefStaticRegion,
DefEarlyBoundRegion(/* space */ subst::ParamSpace, DefEarlyBoundRegion(/* space */ subst::ParamSpace,
@ -404,7 +404,7 @@ impl<'a> LifetimeContext<'a> {
} }
fn check_lifetime_defs(&mut self, old_scope: Scope, lifetimes: &Vec<ast::LifetimeDef>) { fn check_lifetime_defs(&mut self, old_scope: Scope, lifetimes: &Vec<ast::LifetimeDef>) {
for i in range(0, lifetimes.len()) { for i in 0..lifetimes.len() {
let lifetime_i = &lifetimes[i]; let lifetime_i = &lifetimes[i];
let special_idents = [special_idents::static_lifetime]; let special_idents = [special_idents::static_lifetime];
@ -417,7 +417,7 @@ impl<'a> LifetimeContext<'a> {
} }
// It is a hard error to shadow a lifetime within the same scope. // It is a hard error to shadow a lifetime within the same scope.
for j in range(i + 1, lifetimes.len()) { for j in i + 1..lifetimes.len() {
let lifetime_j = &lifetimes[j]; let lifetime_j = &lifetimes[j];
if lifetime_i.lifetime.name == lifetime_j.lifetime.name { if lifetime_i.lifetime.name == lifetime_j.lifetime.name {

View file

@ -28,7 +28,7 @@ use syntax::codemap::{Span, DUMMY_SP};
/// identify each in-scope parameter by an *index* and a *parameter /// identify each in-scope parameter by an *index* and a *parameter
/// space* (which indicates where the parameter is defined; see /// space* (which indicates where the parameter is defined; see
/// `ParamSpace`). /// `ParamSpace`).
#[derive(Clone, PartialEq, Eq, Hash, Show)] #[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct Substs<'tcx> { pub struct Substs<'tcx> {
pub types: VecPerParamSpace<Ty<'tcx>>, pub types: VecPerParamSpace<Ty<'tcx>>,
pub regions: RegionSubsts, pub regions: RegionSubsts,
@ -37,7 +37,7 @@ pub struct Substs<'tcx> {
/// Represents the values to use when substituting lifetime parameters. /// Represents the values to use when substituting lifetime parameters.
/// If the value is `ErasedRegions`, then this subst is occurring during /// If the value is `ErasedRegions`, then this subst is occurring during
/// trans, and all region parameters will be replaced with `ty::ReStatic`. /// trans, and all region parameters will be replaced with `ty::ReStatic`.
#[derive(Clone, PartialEq, Eq, Hash, Show)] #[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub enum RegionSubsts { pub enum RegionSubsts {
ErasedRegions, ErasedRegions,
NonerasedRegions(VecPerParamSpace<ty::Region>) NonerasedRegions(VecPerParamSpace<ty::Region>)
@ -180,7 +180,7 @@ impl RegionSubsts {
// ParamSpace // ParamSpace
#[derive(PartialOrd, Ord, PartialEq, Eq, Copy, #[derive(PartialOrd, Ord, PartialEq, Eq, Copy,
Clone, Hash, RustcEncodable, RustcDecodable, Show)] Clone, Hash, RustcEncodable, RustcDecodable, Debug)]
pub enum ParamSpace { pub enum ParamSpace {
TypeSpace, // Type parameters attached to a type definition, trait, or impl TypeSpace, // Type parameters attached to a type definition, trait, or impl
SelfSpace, // Self parameter on a trait SelfSpace, // Self parameter on a trait

View file

@ -147,7 +147,7 @@ pub type TraitObligations<'tcx> = subst::VecPerParamSpace<TraitObligation<'tcx>>
pub type Selection<'tcx> = Vtable<'tcx, PredicateObligation<'tcx>>; pub type Selection<'tcx> = Vtable<'tcx, PredicateObligation<'tcx>>;
#[derive(Clone,Show)] #[derive(Clone,Debug)]
pub enum SelectionError<'tcx> { pub enum SelectionError<'tcx> {
Unimplemented, Unimplemented,
Overflow, Overflow,
@ -215,7 +215,7 @@ pub type SelectionResult<'tcx, T> = Result<Option<T>, SelectionError<'tcx>>;
/// ### The type parameter `N` /// ### The type parameter `N`
/// ///
/// See explanation on `VtableImplData`. /// See explanation on `VtableImplData`.
#[derive(Show,Clone)] #[derive(Debug,Clone)]
pub enum Vtable<'tcx, N> { pub enum Vtable<'tcx, N> {
/// Vtable identifying a particular impl. /// Vtable identifying a particular impl.
VtableImpl(VtableImplData<'tcx, N>), VtableImpl(VtableImplData<'tcx, N>),
@ -258,7 +258,7 @@ pub struct VtableImplData<'tcx, N> {
pub nested: subst::VecPerParamSpace<N> pub nested: subst::VecPerParamSpace<N>
} }
#[derive(Show,Clone)] #[derive(Debug,Clone)]
pub struct VtableBuiltinData<N> { pub struct VtableBuiltinData<N> {
pub nested: subst::VecPerParamSpace<N> pub nested: subst::VecPerParamSpace<N>
} }

View file

@ -36,7 +36,7 @@ pub enum ObjectSafetyViolation<'tcx> {
} }
/// Reasons a method might not be object-safe. /// Reasons a method might not be object-safe.
#[derive(Copy,Clone,Show)] #[derive(Copy,Clone,Debug)]
pub enum MethodViolationCode { pub enum MethodViolationCode {
/// e.g., `fn(self)` /// e.g., `fn(self)`
ByValueSelf, ByValueSelf,

View file

@ -96,7 +96,7 @@ pub enum MethodMatchResult {
MethodDidNotMatch, MethodDidNotMatch,
} }
#[derive(Copy, Show)] #[derive(Copy, Debug)]
pub enum MethodMatchedData { pub enum MethodMatchedData {
// In the case of a precise match, we don't really need to store // In the case of a precise match, we don't really need to store
// how the match was found. So don't. // how the match was found. So don't.
@ -131,7 +131,7 @@ pub enum MethodMatchedData {
/// matching where clause. Part of the reason for this is that where /// matching where clause. Part of the reason for this is that where
/// clauses can give additional information (like, the types of output /// clauses can give additional information (like, the types of output
/// parameters) that would have to be inferred from the impl. /// parameters) that would have to be inferred from the impl.
#[derive(PartialEq,Eq,Show,Clone)] #[derive(PartialEq,Eq,Debug,Clone)]
enum SelectionCandidate<'tcx> { enum SelectionCandidate<'tcx> {
BuiltinCandidate(ty::BuiltinBound), BuiltinCandidate(ty::BuiltinBound),
ParamCandidate(ty::PolyTraitRef<'tcx>), ParamCandidate(ty::PolyTraitRef<'tcx>),
@ -172,7 +172,7 @@ enum BuiltinBoundConditions<'tcx> {
AmbiguousBuiltin AmbiguousBuiltin
} }
#[derive(Show)] #[derive(Debug)]
enum EvaluationResult<'tcx> { enum EvaluationResult<'tcx> {
EvaluatedToOk, EvaluatedToOk,
EvaluatedToAmbig, EvaluatedToAmbig,
@ -595,7 +595,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
let mut i = 0; let mut i = 0;
while i < candidates.len() { while i < candidates.len() {
let is_dup = let is_dup =
range(0, candidates.len()) (0..candidates.len())
.filter(|&j| i != j) .filter(|&j| i != j)
.any(|j| self.candidate_should_be_dropped_in_favor_of(stack, .any(|j| self.candidate_should_be_dropped_in_favor_of(stack,
&candidates[i], &candidates[i],

View file

@ -112,7 +112,7 @@ pub struct field<'tcx> {
pub mt: mt<'tcx> pub mt: mt<'tcx>
} }
#[derive(Clone, Copy, Show)] #[derive(Clone, Copy, Debug)]
pub enum ImplOrTraitItemContainer { pub enum ImplOrTraitItemContainer {
TraitContainer(ast::DefId), TraitContainer(ast::DefId),
ImplContainer(ast::DefId), ImplContainer(ast::DefId),
@ -127,7 +127,7 @@ impl ImplOrTraitItemContainer {
} }
} }
#[derive(Clone, Show)] #[derive(Clone, Debug)]
pub enum ImplOrTraitItem<'tcx> { pub enum ImplOrTraitItem<'tcx> {
MethodTraitItem(Rc<Method<'tcx>>), MethodTraitItem(Rc<Method<'tcx>>),
TypeTraitItem(Rc<AssociatedType>), TypeTraitItem(Rc<AssociatedType>),
@ -172,7 +172,7 @@ impl<'tcx> ImplOrTraitItem<'tcx> {
} }
} }
#[derive(Clone, Copy, Show)] #[derive(Clone, Copy, Debug)]
pub enum ImplOrTraitItemId { pub enum ImplOrTraitItemId {
MethodTraitItemId(ast::DefId), MethodTraitItemId(ast::DefId),
TypeTraitItemId(ast::DefId), TypeTraitItemId(ast::DefId),
@ -187,7 +187,7 @@ impl ImplOrTraitItemId {
} }
} }
#[derive(Clone, Show)] #[derive(Clone, Debug)]
pub struct Method<'tcx> { pub struct Method<'tcx> {
pub name: ast::Name, pub name: ast::Name,
pub generics: ty::Generics<'tcx>, pub generics: ty::Generics<'tcx>,
@ -231,7 +231,7 @@ impl<'tcx> Method<'tcx> {
} }
} }
#[derive(Clone, Copy, Show)] #[derive(Clone, Copy, Debug)]
pub struct AssociatedType { pub struct AssociatedType {
pub name: ast::Name, pub name: ast::Name,
pub vis: ast::Visibility, pub vis: ast::Visibility,
@ -239,13 +239,13 @@ pub struct AssociatedType {
pub container: ImplOrTraitItemContainer, pub container: ImplOrTraitItemContainer,
} }
#[derive(Clone, Copy, PartialEq, Eq, Hash, Show)] #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct mt<'tcx> { pub struct mt<'tcx> {
pub ty: Ty<'tcx>, pub ty: Ty<'tcx>,
pub mutbl: ast::Mutability, pub mutbl: ast::Mutability,
} }
#[derive(Clone, Copy, Show)] #[derive(Clone, Copy, Debug)]
pub struct field_ty { pub struct field_ty {
pub name: Name, pub name: Name,
pub id: DefId, pub id: DefId,
@ -274,7 +274,7 @@ pub struct ItemVariances {
pub regions: VecPerParamSpace<Variance>, pub regions: VecPerParamSpace<Variance>,
} }
#[derive(Clone, PartialEq, RustcDecodable, RustcEncodable, Show, Copy)] #[derive(Clone, PartialEq, RustcDecodable, RustcEncodable, Debug, Copy)]
pub enum Variance { pub enum Variance {
Covariant, // T<A> <: T<B> iff A <: B -- e.g., function return type Covariant, // T<A> <: T<B> iff A <: B -- e.g., function return type
Invariant, // T<A> <: T<B> iff B == A -- e.g., type of mutable cell Invariant, // T<A> <: T<B> iff B == A -- e.g., type of mutable cell
@ -282,13 +282,13 @@ pub enum Variance {
Bivariant, // T<A> <: T<B> -- e.g., unused type parameter Bivariant, // T<A> <: T<B> -- e.g., unused type parameter
} }
#[derive(Clone, Show)] #[derive(Clone, Debug)]
pub enum AutoAdjustment<'tcx> { pub enum AutoAdjustment<'tcx> {
AdjustReifyFnPointer(ast::DefId), // go from a fn-item type to a fn-pointer type AdjustReifyFnPointer(ast::DefId), // go from a fn-item type to a fn-pointer type
AdjustDerefRef(AutoDerefRef<'tcx>) AdjustDerefRef(AutoDerefRef<'tcx>)
} }
#[derive(Clone, PartialEq, Show)] #[derive(Clone, PartialEq, Debug)]
pub enum UnsizeKind<'tcx> { pub enum UnsizeKind<'tcx> {
// [T, ..n] -> [T], the uint field is n. // [T, ..n] -> [T], the uint field is n.
UnsizeLength(uint), UnsizeLength(uint),
@ -298,13 +298,13 @@ pub enum UnsizeKind<'tcx> {
UnsizeVtable(TyTrait<'tcx>, /* the self type of the trait */ Ty<'tcx>) UnsizeVtable(TyTrait<'tcx>, /* the self type of the trait */ Ty<'tcx>)
} }
#[derive(Clone, Show)] #[derive(Clone, Debug)]
pub struct AutoDerefRef<'tcx> { pub struct AutoDerefRef<'tcx> {
pub autoderefs: uint, pub autoderefs: uint,
pub autoref: Option<AutoRef<'tcx>> pub autoref: Option<AutoRef<'tcx>>
} }
#[derive(Clone, PartialEq, Show)] #[derive(Clone, PartialEq, Debug)]
pub enum AutoRef<'tcx> { pub enum AutoRef<'tcx> {
/// Convert from T to &T /// Convert from T to &T
/// The third field allows us to wrap other AutoRef adjustments. /// The third field allows us to wrap other AutoRef adjustments.
@ -421,13 +421,13 @@ pub fn type_of_adjust<'tcx>(cx: &ctxt<'tcx>, adj: &AutoAdjustment<'tcx>) -> Opti
} }
} }
#[derive(Clone, Copy, RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Show)] #[derive(Clone, Copy, RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Debug)]
pub struct param_index { pub struct param_index {
pub space: subst::ParamSpace, pub space: subst::ParamSpace,
pub index: uint pub index: uint
} }
#[derive(Clone, Show)] #[derive(Clone, Debug)]
pub enum MethodOrigin<'tcx> { pub enum MethodOrigin<'tcx> {
// fully statically resolved method // fully statically resolved method
MethodStatic(ast::DefId), MethodStatic(ast::DefId),
@ -445,7 +445,7 @@ pub enum MethodOrigin<'tcx> {
// details for a method invoked with a receiver whose type is a type parameter // details for a method invoked with a receiver whose type is a type parameter
// with a bounded trait. // with a bounded trait.
#[derive(Clone, Show)] #[derive(Clone, Debug)]
pub struct MethodParam<'tcx> { pub struct MethodParam<'tcx> {
// the precise trait reference that occurs as a bound -- this may // the precise trait reference that occurs as a bound -- this may
// be a supertrait of what the user actually typed. Note that it // be a supertrait of what the user actually typed. Note that it
@ -466,7 +466,7 @@ pub struct MethodParam<'tcx> {
} }
// details for a method invoked with a receiver whose type is an object // details for a method invoked with a receiver whose type is an object
#[derive(Clone, Show)] #[derive(Clone, Debug)]
pub struct MethodObject<'tcx> { pub struct MethodObject<'tcx> {
// the (super)trait containing the method to be invoked // the (super)trait containing the method to be invoked
pub trait_ref: Rc<ty::TraitRef<'tcx>>, pub trait_ref: Rc<ty::TraitRef<'tcx>>,
@ -503,13 +503,13 @@ pub struct MethodCallee<'tcx> {
/// needed to add to the side tables. Thus to disambiguate /// needed to add to the side tables. Thus to disambiguate
/// we also keep track of whether there's an adjustment in /// we also keep track of whether there's an adjustment in
/// our key. /// our key.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Show)] #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct MethodCall { pub struct MethodCall {
pub expr_id: ast::NodeId, pub expr_id: ast::NodeId,
pub adjustment: ExprAdjustment pub adjustment: ExprAdjustment
} }
#[derive(Clone, PartialEq, Eq, Hash, Show, RustcEncodable, RustcDecodable, Copy)] #[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable, Copy)]
pub enum ExprAdjustment { pub enum ExprAdjustment {
NoAdjustment, NoAdjustment,
AutoDeref(uint), AutoDeref(uint),
@ -923,7 +923,7 @@ impl<'tcx> ctxt<'tcx> {
} }
} }
#[derive(Show)] #[derive(Debug)]
pub struct TyS<'tcx> { pub struct TyS<'tcx> {
pub sty: sty<'tcx>, pub sty: sty<'tcx>,
pub flags: TypeFlags, pub flags: TypeFlags,
@ -1029,21 +1029,21 @@ pub fn type_escapes_depth(ty: Ty, depth: u32) -> bool {
ty.region_depth > depth ty.region_depth > depth
} }
#[derive(Clone, PartialEq, Eq, Hash, Show)] #[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct BareFnTy<'tcx> { pub struct BareFnTy<'tcx> {
pub unsafety: ast::Unsafety, pub unsafety: ast::Unsafety,
pub abi: abi::Abi, pub abi: abi::Abi,
pub sig: PolyFnSig<'tcx>, pub sig: PolyFnSig<'tcx>,
} }
#[derive(Clone, PartialEq, Eq, Hash, Show)] #[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct ClosureTy<'tcx> { pub struct ClosureTy<'tcx> {
pub unsafety: ast::Unsafety, pub unsafety: ast::Unsafety,
pub abi: abi::Abi, pub abi: abi::Abi,
pub sig: PolyFnSig<'tcx>, pub sig: PolyFnSig<'tcx>,
} }
#[derive(Clone, Copy, PartialEq, Eq, Hash, Show)] #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum FnOutput<'tcx> { pub enum FnOutput<'tcx> {
FnConverging(Ty<'tcx>), FnConverging(Ty<'tcx>),
FnDiverging FnDiverging
@ -1100,7 +1100,7 @@ impl<'tcx> PolyFnSig<'tcx> {
} }
} }
#[derive(Clone, Copy, PartialEq, Eq, Hash, Show)] #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct ParamTy { pub struct ParamTy {
pub space: subst::ParamSpace, pub space: subst::ParamSpace,
pub idx: u32, pub idx: u32,
@ -1146,7 +1146,7 @@ pub struct ParamTy {
/// is the outer fn. /// is the outer fn.
/// ///
/// [dbi]: http://en.wikipedia.org/wiki/De_Bruijn_index /// [dbi]: http://en.wikipedia.org/wiki/De_Bruijn_index
#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Show, Copy)] #[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug, Copy)]
pub struct DebruijnIndex { pub struct DebruijnIndex {
// We maintain the invariant that this is never 0. So 1 indicates // We maintain the invariant that this is never 0. So 1 indicates
// the innermost binder. To ensure this, create with `DebruijnIndex::new`. // the innermost binder. To ensure this, create with `DebruijnIndex::new`.
@ -1154,7 +1154,7 @@ pub struct DebruijnIndex {
} }
/// Representation of regions: /// Representation of regions:
#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Show, Copy)] #[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug, Copy)]
pub enum Region { pub enum Region {
// Region bound in a type or fn declaration which will be // Region bound in a type or fn declaration which will be
// substituted 'early' -- that is, at the same time when type // substituted 'early' -- that is, at the same time when type
@ -1195,13 +1195,13 @@ pub enum Region {
/// Upvars do not get their own node-id. Instead, we use the pair of /// Upvars do not get their own node-id. Instead, we use the pair of
/// the original var id (that is, the root variable that is referenced /// the original var id (that is, the root variable that is referenced
/// by the upvar) and the id of the closure expression. /// by the upvar) and the id of the closure expression.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Show)] #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct UpvarId { pub struct UpvarId {
pub var_id: ast::NodeId, pub var_id: ast::NodeId,
pub closure_expr_id: ast::NodeId, pub closure_expr_id: ast::NodeId,
} }
#[derive(Clone, PartialEq, Eq, Hash, Show, RustcEncodable, RustcDecodable, Copy)] #[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable, Copy)]
pub enum BorrowKind { pub enum BorrowKind {
/// Data must be immutable and is aliasable. /// Data must be immutable and is aliasable.
ImmBorrow, ImmBorrow,
@ -1294,7 +1294,7 @@ pub enum BorrowKind {
/// - Through mutation, the borrowed upvars can actually escape /// - Through mutation, the borrowed upvars can actually escape
/// the closure, so sometimes it is necessary for them to be larger /// the closure, so sometimes it is necessary for them to be larger
/// than the closure lifetime itself. /// than the closure lifetime itself.
#[derive(PartialEq, Clone, RustcEncodable, RustcDecodable, Show, Copy)] #[derive(PartialEq, Clone, RustcEncodable, RustcDecodable, Debug, Copy)]
pub struct UpvarBorrow { pub struct UpvarBorrow {
pub kind: BorrowKind, pub kind: BorrowKind,
pub region: ty::Region, pub region: ty::Region,
@ -1320,7 +1320,7 @@ impl Region {
} }
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, #[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash,
RustcEncodable, RustcDecodable, Show, Copy)] RustcEncodable, RustcDecodable, Debug, Copy)]
/// A "free" region `fr` can be interpreted as "some region /// A "free" region `fr` can be interpreted as "some region
/// at least as big as the scope `fr.scope`". /// at least as big as the scope `fr.scope`".
pub struct FreeRegion { pub struct FreeRegion {
@ -1329,7 +1329,7 @@ pub struct FreeRegion {
} }
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, #[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash,
RustcEncodable, RustcDecodable, Show, Copy)] RustcEncodable, RustcDecodable, Debug, Copy)]
pub enum BoundRegion { pub enum BoundRegion {
/// An anonymous region parameter for a given fn (&T) /// An anonymous region parameter for a given fn (&T)
BrAnon(u32), BrAnon(u32),
@ -1350,7 +1350,7 @@ pub enum BoundRegion {
// NB: If you change this, you'll probably want to change the corresponding // NB: If you change this, you'll probably want to change the corresponding
// AST structure in libsyntax/ast.rs as well. // AST structure in libsyntax/ast.rs as well.
#[derive(Clone, PartialEq, Eq, Hash, Show)] #[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub enum sty<'tcx> { pub enum sty<'tcx> {
ty_bool, ty_bool,
ty_char, ty_char,
@ -1397,7 +1397,7 @@ pub enum sty<'tcx> {
// on non-useful type error messages) // on non-useful type error messages)
} }
#[derive(Clone, PartialEq, Eq, Hash, Show)] #[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct TyTrait<'tcx> { pub struct TyTrait<'tcx> {
pub principal: ty::PolyTraitRef<'tcx>, pub principal: ty::PolyTraitRef<'tcx>,
pub bounds: ExistentialBounds<'tcx>, pub bounds: ExistentialBounds<'tcx>,
@ -1469,7 +1469,7 @@ impl<'tcx> TyTrait<'tcx> {
/// Note that a `TraitRef` introduces a level of region binding, to /// Note that a `TraitRef` introduces a level of region binding, to
/// account for higher-ranked trait bounds like `T : for<'a> Foo<&'a /// account for higher-ranked trait bounds like `T : for<'a> Foo<&'a
/// U>` or higher-ranked object types. /// U>` or higher-ranked object types.
#[derive(Clone, PartialEq, Eq, Hash, Show)] #[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct TraitRef<'tcx> { pub struct TraitRef<'tcx> {
pub def_id: DefId, pub def_id: DefId,
pub substs: &'tcx Substs<'tcx>, pub substs: &'tcx Substs<'tcx>,
@ -1509,7 +1509,7 @@ impl<'tcx> PolyTraitRef<'tcx> {
/// erase, or otherwise "discharge" these bound regions, we change the /// erase, or otherwise "discharge" these bound regions, we change the
/// type from `Binder<T>` to just `T` (see /// type from `Binder<T>` to just `T` (see
/// e.g. `liberate_late_bound_regions`). /// e.g. `liberate_late_bound_regions`).
#[derive(Clone, PartialEq, Eq, Hash, Show)] #[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct Binder<T>(pub T); pub struct Binder<T>(pub T);
#[derive(Clone, Copy, PartialEq)] #[derive(Clone, Copy, PartialEq)]
@ -1518,7 +1518,7 @@ pub enum IntVarValue {
UintType(ast::UintTy), UintType(ast::UintTy),
} }
#[derive(Clone, Copy, Show)] #[derive(Clone, Copy, Debug)]
pub enum terr_vstore_kind { pub enum terr_vstore_kind {
terr_vec, terr_vec,
terr_str, terr_str,
@ -1526,14 +1526,14 @@ pub enum terr_vstore_kind {
terr_trait terr_trait
} }
#[derive(Clone, Copy, Show)] #[derive(Clone, Copy, Debug)]
pub struct expected_found<T> { pub struct expected_found<T> {
pub expected: T, pub expected: T,
pub found: T pub found: T
} }
// Data structures used in type unification // Data structures used in type unification
#[derive(Clone, Copy, Show)] #[derive(Clone, Copy, Debug)]
pub enum type_err<'tcx> { pub enum type_err<'tcx> {
terr_mismatch, terr_mismatch,
terr_unsafety_mismatch(expected_found<ast::Unsafety>), terr_unsafety_mismatch(expected_found<ast::Unsafety>),
@ -1567,7 +1567,7 @@ pub enum type_err<'tcx> {
/// Bounds suitable for a named type parameter like `A` in `fn foo<A>` /// Bounds suitable for a named type parameter like `A` in `fn foo<A>`
/// as well as the existential type parameter in an object type. /// as well as the existential type parameter in an object type.
#[derive(PartialEq, Eq, Hash, Clone, Show)] #[derive(PartialEq, Eq, Hash, Clone, Debug)]
 pub struct ParamBounds<'tcx> {
 pub region_bounds: Vec<ty::Region>,
 pub builtin_bounds: BuiltinBounds,
@@ -1580,7 +1580,7 @@ pub struct ParamBounds<'tcx> {
 /// major difference between this case and `ParamBounds` is that
 /// general purpose trait bounds are omitted and there must be
 /// *exactly one* region.
-#[derive(PartialEq, Eq, Hash, Clone, Show)]
+#[derive(PartialEq, Eq, Hash, Clone, Debug)]
 pub struct ExistentialBounds<'tcx> {
 pub region_bound: ty::Region,
 pub builtin_bounds: BuiltinBounds,
@@ -1590,7 +1590,7 @@ pub struct ExistentialBounds<'tcx> {
 pub type BuiltinBounds = EnumSet<BuiltinBound>;
-#[derive(Clone, RustcEncodable, PartialEq, Eq, RustcDecodable, Hash,
-Show, Copy)]
+#[derive(Clone, RustcEncodable, PartialEq, Eq, RustcDecodable, Hash,
+Debug, Copy)]
 #[repr(uint)]
 pub enum BuiltinBound {
 BoundSend,
@@ -1664,7 +1664,7 @@ pub enum InferTy {
 FreshIntTy(u32),
 }
-#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Show, Copy)]
+#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
 pub enum UnconstrainedNumeric {
 UnconstrainedFloat,
 UnconstrainedInt,
@@ -1672,7 +1672,7 @@ pub enum UnconstrainedNumeric {
 }
-#[derive(Clone, RustcEncodable, RustcDecodable, Eq, Hash, Show, Copy)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Eq, Hash, Debug, Copy)]
 pub enum InferRegion {
 ReVar(RegionVid),
 ReSkolemized(u32, BoundRegion)
@@ -1746,7 +1746,7 @@ impl fmt::Debug for IntVarValue {
 }
 }
-#[derive(Clone, Show)]
+#[derive(Clone, Debug)]
 pub struct TypeParameterDef<'tcx> {
 pub name: ast::Name,
 pub def_id: ast::DefId,
@@ -1756,7 +1756,7 @@ pub struct TypeParameterDef<'tcx> {
 pub default: Option<Ty<'tcx>>,
 }
-#[derive(RustcEncodable, RustcDecodable, Clone, Show)]
+#[derive(RustcEncodable, RustcDecodable, Clone, Debug)]
 pub struct RegionParameterDef {
 pub name: ast::Name,
 pub def_id: ast::DefId,
@@ -1773,7 +1773,7 @@ impl RegionParameterDef {
 /// Information about the formal type/lifetime parameters associated
 /// with an item or method. Analogous to ast::Generics.
-#[derive(Clone, Show)]
+#[derive(Clone, Debug)]
 pub struct Generics<'tcx> {
 pub types: VecPerParamSpace<TypeParameterDef<'tcx>>,
 pub regions: VecPerParamSpace<RegionParameterDef>,
@@ -1809,7 +1809,7 @@ impl<'tcx> Generics<'tcx> {
 }
 }
-#[derive(Clone, PartialEq, Eq, Hash, Show)]
+#[derive(Clone, PartialEq, Eq, Hash, Debug)]
 pub enum Predicate<'tcx> {
 /// Corresponds to `where Foo : Bar<A,B,C>`. `Foo` here would be
 /// the `Self` type of the trait reference and `A`, `B`, and `C`
@@ -1830,7 +1830,7 @@ pub enum Predicate<'tcx> {
 Projection(PolyProjectionPredicate<'tcx>),
 }
-#[derive(Clone, PartialEq, Eq, Hash, Show)]
+#[derive(Clone, PartialEq, Eq, Hash, Debug)]
 pub struct TraitPredicate<'tcx> {
 pub trait_ref: Rc<TraitRef<'tcx>>
 }
@@ -1856,11 +1856,11 @@ impl<'tcx> PolyTraitPredicate<'tcx> {
 }
 }
-#[derive(Clone, PartialEq, Eq, Hash, Show)]
+#[derive(Clone, PartialEq, Eq, Hash, Debug)]
 pub struct EquatePredicate<'tcx>(pub Ty<'tcx>, pub Ty<'tcx>); // `0 == 1`
 pub type PolyEquatePredicate<'tcx> = ty::Binder<EquatePredicate<'tcx>>;
-#[derive(Clone, PartialEq, Eq, Hash, Show)]
+#[derive(Clone, PartialEq, Eq, Hash, Debug)]
 pub struct OutlivesPredicate<A,B>(pub A, pub B); // `A : B`
 pub type PolyOutlivesPredicate<A,B> = ty::Binder<OutlivesPredicate<A,B>>;
 pub type PolyRegionOutlivesPredicate = PolyOutlivesPredicate<ty::Region, ty::Region>;
@@ -1878,7 +1878,7 @@ pub type PolyTypeOutlivesPredicate<'tcx> = PolyOutlivesPredicate<Ty<'tcx>, ty::R
 /// equality between arbitrary types. Processing an instance of Form
 /// #2 eventually yields one of these `ProjectionPredicate`
 /// instances to normalize the LHS.
-#[derive(Clone, PartialEq, Eq, Hash, Show)]
+#[derive(Clone, PartialEq, Eq, Hash, Debug)]
 pub struct ProjectionPredicate<'tcx> {
 pub projection_ty: ProjectionTy<'tcx>,
 pub ty: Ty<'tcx>,
@@ -1898,7 +1898,7 @@ impl<'tcx> PolyProjectionPredicate<'tcx> {
 /// Represents the projection of an associated type. In explicit UFCS
 /// form this would be written `<T as Trait<..>>::N`.
-#[derive(Clone, PartialEq, Eq, Hash, Show)]
+#[derive(Clone, PartialEq, Eq, Hash, Debug)]
 pub struct ProjectionTy<'tcx> {
 /// The trait reference `T as Trait<..>`.
 pub trait_ref: Rc<ty::TraitRef<'tcx>>,
@@ -2034,7 +2034,7 @@ impl<'tcx> Predicate<'tcx> {
 /// `[[], [U:Bar<T>]]`. Now if there were some particular reference
 /// like `Foo<int,uint>`, then the `GenericBounds` would be `[[],
 /// [uint:Bar<int>]]`.
-#[derive(Clone, Show)]
+#[derive(Clone, Debug)]
 pub struct GenericBounds<'tcx> {
 pub predicates: VecPerParamSpace<Predicate<'tcx>>,
 }
@@ -2243,7 +2243,7 @@ impl<'a, 'tcx> ParameterEnvironment<'a, 'tcx> {
 /// stray references in a comment or something). We try to reserve the
 /// "poly" prefix to refer to higher-ranked things, as in
 /// `PolyTraitRef`.
-#[derive(Clone, Show)]
+#[derive(Clone, Debug)]
 pub struct TypeScheme<'tcx> {
 pub generics: Generics<'tcx>,
 pub ty: Ty<'tcx>
@@ -2286,7 +2286,7 @@ pub struct Closure<'tcx> {
 pub kind: ClosureKind,
 }
-#[derive(Clone, Copy, PartialEq, Eq, Show)]
+#[derive(Clone, Copy, PartialEq, Eq, Debug)]
 pub enum ClosureKind {
 FnClosureKind,
 FnMutClosureKind,
@@ -3745,7 +3745,7 @@ pub fn is_instantiable<'tcx>(cx: &ctxt<'tcx>, r_ty: Ty<'tcx>) -> bool {
 ///
 /// The ordering of the cases is significant. They are sorted so that cmp::max
 /// will keep the "more erroneous" of two values.
-#[derive(Copy, PartialOrd, Ord, Eq, PartialEq, Show)]
+#[derive(Copy, PartialOrd, Ord, Eq, PartialEq, Debug)]
 pub enum Representability {
 Representable,
 ContainsRecursive,
@@ -4344,7 +4344,7 @@ pub fn adjust_ty<'tcx, F>(cx: &ctxt<'tcx>,
 let mut adjusted_ty = unadjusted_ty;
 if !ty::type_is_error(adjusted_ty) {
-for i in range(0, adj.autoderefs) {
+for i in 0..adj.autoderefs {
 let method_call = MethodCall::autoderef(expr_id, i);
 match method_type(method_call) {
 Some(method_ty) => {
@@ -6536,7 +6536,7 @@ impl<'a,'tcx> ClosureTyper<'tcx> for ty::ParameterEnvironment<'a,'tcx> {
 /// The category of explicit self.
-#[derive(Clone, Copy, Eq, PartialEq, Show)]
+#[derive(Clone, Copy, Eq, PartialEq, Debug)]
 pub enum ExplicitSelfCategory {
 StaticExplicitSelfCategory,
 ByValueExplicitSelfCategory,
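The hunks above are mechanical renames of `#[derive(Show)]` to `#[derive(Debug)]`. As a rough sketch of what the renamed derive provides — the `RegionBound` type below is hypothetical, not one of the rustc types in this diff — `Debug` is the trait that the `{:?}` format specifier dispatches to:

```rust
// Minimal standalone sketch: `#[derive(Debug)]` generates the `fmt::Debug`
// impl that `{:?}` formatting calls into. Illustrative only.
#[derive(Debug)]
struct RegionBound {
    index: u32,
    name: &'static str,
}

fn main() {
    let bound = RegionBound { index: 0, name: "'a" };
    // Prints something like: RegionBound { index: 0, name: "'a" }
    println!("{:?}", bound);
}
```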

@@ -249,7 +249,7 @@ pub enum EntryFnType {
 EntryNone,
 }
-#[derive(Copy, PartialEq, PartialOrd, Clone, Ord, Eq, Hash, Show)]
+#[derive(Copy, PartialEq, PartialOrd, Clone, Ord, Eq, Hash, Debug)]
 pub enum CrateType {
 CrateTypeExecutable,
 CrateTypeDylib,
@@ -672,7 +672,7 @@ pub fn optgroups() -> Vec<getopts::OptGroup> {
 .collect()
 }
-#[derive(Copy, Clone, PartialEq, Eq, Show)]
+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
 pub enum OptionStability { Stable, Unstable }
 #[derive(Clone, PartialEq, Eq)]
@@ -10,7 +10,7 @@
 use std::slice;
-#[derive(Clone, Show)]
+#[derive(Clone, Debug)]
 pub struct SearchPaths {
 paths: Vec<(PathKind, Path)>,
 }
@@ -20,7 +20,7 @@ pub struct Iter<'a> {
 iter: slice::Iter<'a, (PathKind, Path)>,
 }
-#[derive(Eq, PartialEq, Clone, Copy, Show)]
+#[derive(Eq, PartialEq, Clone, Copy, Debug)]
 pub enum PathKind {
 Native,
 Crate,
@@ -27,7 +27,7 @@ pub const FN_OUTPUT_NAME: &'static str = "Output";
 // Useful type to use with `Result<>` indicate that an error has already
 // been reported to the user, so no need to continue checking.
-#[derive(Clone, Copy, Show)]
+#[derive(Clone, Copy, Debug)]
 pub struct ErrorReported;
 pub fn time<T, U, F>(do_it: bool, what: &str, u: U, f: F) -> T where
@@ -14,7 +14,7 @@ pub fn lev_distance(me: &str, t: &str) -> uint {
 if me.is_empty() { return t.chars().count(); }
 if t.is_empty() { return me.chars().count(); }
-let mut dcol: Vec<_> = range(0, t.len() + 1).collect();
+let mut dcol: Vec<_> = (0..t.len() + 1).collect();
 let mut t_last = 0;
 for (i, sc) in me.chars().enumerate() {
@@ -45,7 +45,7 @@ pub fn lev_distance(me: &str, t: &str) -> uint {
 fn test_lev_distance() {
 use std::char::{ from_u32, MAX };
 // Test bytelength agnosticity
-for c in range(0u32, MAX as u32)
+for c in (0u32..MAX as u32)
 .filter_map(|i| from_u32(i))
 .map(|i| i.to_string()) {
 assert_eq!(lev_distance(&c[], &c[]), 0);
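The `range(a, b)` calls rewritten above become `a..b` range literals. A minimal sketch, assuming nothing beyond the standard library (the values here are made up for illustration), of the two patterns these hunks rely on — collecting a range and looping over one:

```rust
fn main() {
    // `0..5` is a half-open range, equivalent to the old `range(0, 5)`
    // iterator: it yields 0, 1, 2, 3, 4 and stops before 5.
    let squares: Vec<u32> = (0..5).map(|i| i * i).collect();
    assert_eq!(squares, vec![0, 1, 4, 9, 16]);

    // Ranges also work directly in `for` loops, which is the form most of
    // the rewrites in this diff take.
    let mut sum = 0;
    for i in 0..5 {
        sum += i;
    }
    assert_eq!(sum, 10);
}
```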

@@ -52,7 +52,7 @@ use std::iter::range_step;
 use syntax::ast;
 use syntax::visit;
-#[derive(Clone, PartialEq, Show)]
+#[derive(Clone, PartialEq, Debug)]
 pub struct Svh {
 hash: String,
 }
@@ -84,7 +84,7 @@ mod x86_64_unknown_linux_gnu;
 /// Everything `rustc` knows about how to compile for a specific target.
 ///
 /// Every field here must be specified, and has no default value.
-#[derive(Clone, Show)]
+#[derive(Clone, Debug)]
 pub struct Target {
 /// [Data layout](http://llvm.org/docs/LangRef.html#data-layout) to pass to LLVM.
 pub data_layout: String,
@@ -107,7 +107,7 @@ pub struct Target {
 ///
 /// This has an implementation of `Default`, see each field for what the default is. In general,
 /// these try to take "minimal defaults" that don't assume anything about the runtime they run in.
-#[derive(Clone, Show)]
+#[derive(Clone, Debug)]
 pub struct TargetOptions {
 /// Linker to invoke. Defaults to "cc".
 pub linker: String,
@@ -73,7 +73,7 @@
 /// }
 /// }
 ///
-/// impl fmt::Show for Flags {
+/// impl fmt::Debug for Flags {
 /// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 /// write!(f, "hi!")
 /// }
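The doc-comment hunk above updates a hand-written `fmt::Show` impl to `fmt::Debug`. A self-contained sketch of the same shape — this `Flags` is a stand-in type for illustration, not the one documented in the diffed crate:

```rust
use std::fmt;

// Illustrative manual `fmt::Debug` impl, mirroring the doc-comment example.
struct Flags(u32);

impl fmt::Debug for Flags {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Render the wrapped bits in binary, e.g. Flags(0b101).
        write!(f, "Flags({:#b})", self.0)
    }
}

fn main() {
    println!("{:?}", Flags(0b101));
}
```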

@@ -358,7 +358,7 @@ fn add_fragment_siblings_for_extension<'tcx>(this: &MoveData<'tcx>,
 parent_ty.repr(tcx)),
 };
 let tuple_len = v.len();
-for i in range(0, tuple_len) {
+for i in 0..tuple_len {
 if i == tuple_idx { continue }
 let field_name = mc::PositionalField(i);
 add_fragment_sibling_local(field_name, None);
@@ -21,7 +21,7 @@ use syntax::codemap::Span;
 use std::rc::Rc;
-#[derive(Show)]
+#[derive(Debug)]
 pub enum RestrictionResult<'tcx> {
 Safe,
 SafeIf(Rc<LoanPath<'tcx>>, Vec<Rc<LoanPath<'tcx>>>)
Some files were not shown because too many files have changed in this diff.