Remove unnecessary parentheses.
parent 3901228811
commit 39713b8295
47 changed files with 90 additions and 98 deletions
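Every hunk applies the same mechanical change: in Rust, `if`, `while`, `match`, and `return` take a bare expression, so parentheses around the condition, scrutinee, or returned value are redundant, and an unconditional `while (true)` reads better as `loop`. The sketch below is illustrative only, not code from any of the 47 changed files; it shows the idiom the diff applies throughout:

// Illustrative sketch of the style this commit applies; not code from the patch.
fn parity(n: i32) -> &'static str {
    // `match (n % 2)` compiles, but the parentheses add nothing.
    match n % 2 {
        0 => "even",
        _ => "odd",
    }
}

fn main() {
    let mut i = 0;
    // `while (i < 3)` and `if (i == 1)` also compile; the idiomatic form
    // drops the parentheses, and `while (true)` becomes `loop`.
    while i < 3 {
        if i == 1 {
            println!("{} is {}", i, parity(i));
        }
        i += 1;
    }
}

The one place the commit goes beyond dropping parentheses is the first hunk below, where a nested `if ... { true } else { false }` is collapsed into a single boolean expression.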
@@ -140,14 +140,9 @@ pub fn parse_config(args: ~[~str]) -> config {
 adb_test_dir:
 opt_str2(matches.opt_str("adb-test-dir")).to_str(),
 adb_device_status:
-if (opt_str2(matches.opt_str("target")) ==
-~"arm-linux-androideabi") {
-if (opt_str2(matches.opt_str("adb-test-dir")) !=
-~"(none)" &&
-opt_str2(matches.opt_str("adb-test-dir")) !=
-~"") { true }
-else { false }
-} else { false },
+"arm-linux-androideabi" == opt_str2(matches.opt_str("target")) &&
+"(none)" != opt_str2(matches.opt_str("adb-test-dir")) &&
+!opt_str2(matches.opt_str("adb-test-dir")).is_empty(),
 test_shard: test::opt_shard(matches.opt_str("test-shard")),
 verbose: matches.opt_present("verbose")
 }
@@ -532,9 +532,9 @@ fn check_expected_errors(expected_errors: ~[errors::ExpectedError],
 if !found_flags[i] {
 debug!("prefix={} ee.kind={} ee.msg={} line={}",
 prefixes[i], ee.kind, ee.msg, line);
-if (prefix_matches(line, prefixes[i]) &&
+if prefix_matches(line, prefixes[i]) &&
 line.contains(ee.kind) &&
-line.contains(ee.msg)) {
+line.contains(ee.msg) {
 found_flags[i] = true;
 was_expected = true;
 break;
@@ -1025,7 +1025,7 @@ mod bench {
 pub fn vuint_at_A_aligned(bh: &mut BenchHarness) {
 use std::vec;
 let data = vec::from_fn(4*100, |i| {
-match (i % 2) {
+match i % 2 {
 0 => 0x80u8,
 _ => i as u8,
 }
@@ -1033,7 +1033,7 @@ mod bench {
 let mut sum = 0u;
 bh.iter(|| {
 let mut i = 0;
-while (i < data.len()) {
+while i < data.len() {
 sum += reader::vuint_at(data, i).val;
 i += 4;
 }
@@ -1044,7 +1044,7 @@ mod bench {
 pub fn vuint_at_A_unaligned(bh: &mut BenchHarness) {
 use std::vec;
 let data = vec::from_fn(4*100+1, |i| {
-match (i % 2) {
+match i % 2 {
 1 => 0x80u8,
 _ => i as u8
 }
@@ -1052,7 +1052,7 @@ mod bench {
 let mut sum = 0u;
 bh.iter(|| {
 let mut i = 1;
-while (i < data.len()) {
+while i < data.len() {
 sum += reader::vuint_at(data, i).val;
 i += 4;
 }
@@ -1063,7 +1063,7 @@ mod bench {
 pub fn vuint_at_D_aligned(bh: &mut BenchHarness) {
 use std::vec;
 let data = vec::from_fn(4*100, |i| {
-match (i % 4) {
+match i % 4 {
 0 => 0x10u8,
 3 => i as u8,
 _ => 0u8
@@ -1072,7 +1072,7 @@ mod bench {
 let mut sum = 0u;
 bh.iter(|| {
 let mut i = 0;
-while (i < data.len()) {
+while i < data.len() {
 sum += reader::vuint_at(data, i).val;
 i += 4;
 }
@@ -1083,7 +1083,7 @@ mod bench {
 pub fn vuint_at_D_unaligned(bh: &mut BenchHarness) {
 use std::vec;
 let data = vec::from_fn(4*100+1, |i| {
-match (i % 4) {
+match i % 4 {
 1 => 0x10u8,
 0 => i as u8,
 _ => 0u8
@@ -1092,7 +1092,7 @@ mod bench {
 let mut sum = 0u;
 bh.iter(|| {
 let mut i = 1;
-while (i < data.len()) {
+while i < data.len() {
 sum += reader::vuint_at(data, i).val;
 i += 4;
 }
@@ -114,7 +114,7 @@ impl<E:CLike> Items<E> {
 
 impl<E:CLike> Iterator<E> for Items<E> {
 fn next(&mut self) -> Option<E> {
-if (self.bits == 0) {
+if self.bits == 0 {
 return None;
 }
 
@@ -195,7 +195,7 @@ impl Matches {
 
 fn opt_val(&self, nm: &str) -> Option<Optval> {
 let vals = self.opt_vals(nm);
-if (vals.is_empty()) {
+if vals.is_empty() {
 None
 } else {
 Some(vals[0].clone())
@@ -797,7 +797,7 @@ pub mod groups {
 let slice: || = || { cont = it(ss.slice(slice_start, last_end)) };
 
 // if the limit is larger than the string, lower it to save cycles
-if (lim >= fake_i) {
+if lim >= fake_i {
 lim = fake_i;
 }
 
@@ -929,7 +929,7 @@ impl<T : Iterator<char>> Parser<T> {
 return self.error(~"EOF while parsing string");
 }
 
-if (escape) {
+if escape {
 match self.ch {
 '"' => res.push_char('"'),
 '\\' => res.push_char('\\'),
@@ -1360,7 +1360,7 @@ impl serialize::Decoder for Decoder {
 /// Test if two json values are less than one another
 impl Ord for Json {
 fn lt(&self, other: &Json) -> bool {
-match (*self) {
+match *self {
 Number(f0) => {
 match *other {
 Number(f1) => f0 < f1,
@@ -561,9 +561,9 @@ impl ToPrimitive for BigUint {
 impl FromPrimitive for BigUint {
 #[inline]
 fn from_i64(n: i64) -> Option<BigUint> {
-if (n > 0) {
+if n > 0 {
 FromPrimitive::from_u64(n as u64)
-} else if (n == 0) {
+} else if n == 0 {
 Some(Zero::zero())
 } else {
 None
@@ -178,7 +178,7 @@ pub fn parse(file: &mut io::Reader,
 
 // Check magic number
 let magic = file.read_le_u16();
-if (magic != 0x011A) {
+if magic != 0x011A {
 return Err(format!("invalid magic number: expected {:x} but found {:x}",
 0x011A, magic as uint));
 }
@@ -808,7 +808,7 @@ pub fn strptime(s: &str, format: &str) -> Result<Tm, ~str> {
 /// Formats the time according to the format string.
 pub fn strftime(format: &str, tm: &Tm) -> ~str {
 fn days_in_year(year: int) -> i32 {
-if ((year % 4 == 0) && ((year % 100 != 0) || (year % 400 == 0))) {
+if (year % 4 == 0) && ((year % 100 != 0) || (year % 400 == 0)) {
 366 /* Days in a leap year */
 } else {
 365 /* Days in a non-leap year */
@@ -838,14 +838,14 @@ pub fn strftime(format: &str, tm: &Tm) -> ~str {
 let mut year: int = tm.tm_year as int + 1900;
 let mut days: int = iso_week_days (tm.tm_yday, tm.tm_wday);
 
-if (days < 0) {
+if days < 0 {
 /* This ISO week belongs to the previous year. */
 year -= 1;
 days = iso_week_days (tm.tm_yday + (days_in_year(year)), tm.tm_wday);
 } else {
 let d: int = iso_week_days (tm.tm_yday - (days_in_year(year)),
 tm.tm_wday);
-if (0 <= d) {
+if 0 <= d {
 /* This ISO week belongs to the next year. */
 year += 1;
 days = d;
@@ -614,16 +614,16 @@ mod test {
 
 // Test error reporting
 let e = Uuid::parse_string("67e5504410b1426f9247bb680e5fe0c").unwrap_err();
-assert!(match(e){ ErrorInvalidLength(n) => n==31, _ => false });
+assert!(match e { ErrorInvalidLength(n) => n==31, _ => false });
 
 let e = Uuid::parse_string("67e550X410b1426f9247bb680e5fe0cd").unwrap_err();
-assert!(match(e){ ErrorInvalidCharacter(c, n) => c=='X' && n==6, _ => false });
+assert!(match e { ErrorInvalidCharacter(c, n) => c=='X' && n==6, _ => false });
 
 let e = Uuid::parse_string("67e550-4105b1426f9247bb680e5fe0c").unwrap_err();
-assert!(match(e){ ErrorInvalidGroups(n) => n==2, _ => false });
+assert!(match e { ErrorInvalidGroups(n) => n==2, _ => false });
 
 let e = Uuid::parse_string("F9168C5E-CEB2-4faa-B6BF1-02BF39FA1E4").unwrap_err();
-assert!(match(e){ ErrorInvalidGroupLength(g, n, e) => g==3 && n==5 && e==4, _ => false });
+assert!(match e { ErrorInvalidGroupLength(g, n, e) => g==3 && n==5 && e==4, _ => false });
 }
 
 #[test]
@@ -1323,7 +1323,7 @@ mod test {
 fn roundtrip(id: int, n_tasks: int,
 p: &Port<(int, Chan<()>)>,
 ch: &Chan<(int, Chan<()>)>) {
-while (true) {
+loop {
 match p.recv() {
 (1, end_chan) => {
 debug!("{}\n", id);
@@ -508,11 +508,11 @@ pub fn readdir(p: &CString) -> IoResult<~[Path]> {
 
 let dir_ptr = p.with_ref(|buf| opendir(buf));
 
-if (dir_ptr as uint != 0) {
+if dir_ptr as uint != 0 {
 let mut paths = ~[];
 debug!("os::list_dir -- opendir() SUCCESS");
 let mut entry_ptr = readdir(dir_ptr);
-while (entry_ptr as uint != 0) {
+while entry_ptr as uint != 0 {
 let cstr = CString::new(rust_list_dir_val(entry_ptr), false);
 paths.push(Path::new(cstr));
 entry_ptr = readdir(dir_ptr);
@@ -1956,7 +1956,7 @@ fn encode_metadata_inner(wr: &mut MemWriter, parms: EncodeParams, crate: &Crate)
 
 ecx.stats.total_bytes.set(ebml_w.writer.tell());
 
-if (tcx.sess.meta_stats()) {
+if tcx.sess.meta_stats() {
 for e in ebml_w.writer.get_ref().iter() {
 if *e == 0 {
 ecx.stats.zero_bytes.set(ecx.stats.zero_bytes.get() + 1);
@@ -175,7 +175,7 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: ~[@Pat]) {
 useful(ty, ref ctor) => {
 match ty::get(ty).sty {
 ty::ty_bool => {
-match (*ctor) {
+match *ctor {
 val(const_bool(true)) => Some(@"true"),
 val(const_bool(false)) => Some(@"false"),
 _ => None
@@ -1039,7 +1039,7 @@ impl Resolver {
 let mut duplicate_type = NoError;
 let ns = match duplicate_checking_mode {
 ForbidDuplicateModules => {
-if (child.get_module_if_available().is_some()) {
+if child.get_module_if_available().is_some() {
 duplicate_type = ModuleError;
 }
 Some(TypeNS)
@@ -1074,7 +1074,7 @@ impl Resolver {
 }
 OverwriteDuplicates => None
 };
-if (duplicate_type != NoError) {
+if duplicate_type != NoError {
 // Return an error here by looking up the namespace that
 // had the duplicate.
 let ns = ns.unwrap();
@@ -666,7 +666,7 @@ pub fn trans_field_ptr(bcx: &Block, r: &Repr, val: ValueRef, discr: Disr,
 }
 NullablePointer{ nonnull: ref nonnull, nullfields: ref nullfields,
 nndiscr, .. } => {
-if (discr == nndiscr) {
+if discr == nndiscr {
 struct_field_ptr(bcx, nonnull, val, ix, false)
 } else {
 // The unit-like case might have a nonzero number of unit-like fields.
@@ -783,7 +783,7 @@ fn build_const_struct(ccx: &CrateContext, st: &Struct, vals: &[ValueRef])
 /*bad*/as u64;
 let target_offset = roundup(offset, type_align);
 offset = roundup(offset, val_align);
-if (offset != target_offset) {
+if offset != target_offset {
 cfields.push(padding(target_offset - offset));
 offset = target_offset;
 }
@@ -2156,7 +2156,7 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::NodeId) -> ValueRef {
 _ => fail!("get_item_val: weird result in table")
 };
 
-match (attr::first_attr_value_str_by_name(i.attrs, "link_section")) {
+match attr::first_attr_value_str_by_name(i.attrs, "link_section") {
 Some(sect) => unsafe {
 sect.with_c_str(|buf| {
 llvm::LLVMSetSection(v, buf);
@@ -686,7 +686,7 @@ pub fn iter_vec_raw<'r,
 let fcx = bcx.fcx;
 
 let vt = vec_types(bcx, vec_ty);
-if (vt.llunit_alloc_size == 0) {
+if vt.llunit_alloc_size == 0 {
 // Special-case vectors with elements of size 0 so they don't go out of bounds (#9890)
 iter_vec_loop(bcx, data_ptr, &vt, fill, f)
 } else {
@@ -740,4 +740,3 @@ pub fn iter_vec_unboxed<'r,
 let dataptr = get_dataptr(bcx, body_ptr);
 return iter_vec_raw(bcx, dataptr, vec_ty, fill, f);
 }
-
@@ -2883,7 +2883,7 @@ pub fn adjust_ty(cx: ctxt,
 AutoDerefRef(ref adj) => {
 let mut adjusted_ty = unadjusted_ty;
 
-if (!ty::type_is_error(adjusted_ty)) {
+if !ty::type_is_error(adjusted_ty) {
 for i in range(0, adj.autoderefs) {
 match ty::deref(adjusted_ty, true) {
 Some(mt) => { adjusted_ty = mt.ty; }
@@ -740,7 +740,7 @@ fn constrain_regions_in_type(
 }
 });
 
-return (e == rcx.errors_reported);
+return e == rcx.errors_reported;
 }
 
 pub mod guarantor {
@@ -1175,7 +1175,7 @@ pub mod guarantor {
 let mut ct = ct;
 let tcx = rcx.fcx.ccx.tcx;
 
-if (ty::type_is_error(ct.ty)) {
+if ty::type_is_error(ct.ty) {
 ct.cat.pointer = NotPointer;
 return ct;
 }
@@ -599,7 +599,7 @@ pub fn early_resolve_expr(ex: &ast::Expr, fcx: @FnCtxt, is_early: bool) {
 (&ty::ty_box(..), ty::BoxTraitStore) |
 (&ty::ty_uniq(..), ty::UniqTraitStore) |
 (&ty::ty_rptr(..), ty::RegionTraitStore(..)) => {
-let typ = match (&ty::get(ty).sty) {
+let typ = match &ty::get(ty).sty {
 &ty::ty_box(typ) | &ty::ty_uniq(typ) => typ,
 &ty::ty_rptr(_, mt) => mt.ty,
 _ => fail!("shouldn't get here"),
@@ -889,8 +889,8 @@ impl<'a> SolveContext<'a> {
 type_params: opt_vec::Empty,
 region_params: opt_vec::Empty
 };
-while (index < num_inferred &&
-inferred_infos[index].item_id == item_id) {
+while index < num_inferred &&
+inferred_infos[index].item_id == item_id {
 let info = &inferred_infos[index];
 match info.kind {
 SelfParam => {
@@ -999,4 +999,3 @@ fn glb(v1: ty::Variance, v2: ty::Variance) -> ty::Variance {
 (x, ty::Bivariant) | (ty::Bivariant, x) => x,
 }
 }
-
@@ -119,7 +119,7 @@ pub fn parse_args(args: &[~str]) -> Result<ParseResult, int> {
 let mut args = matches.free.clone();
 args.shift();
 
-if (args.len() < 1) {
+if args.len() < 1 {
 usage::general();
 return Err(1);
 }
@@ -154,7 +154,7 @@ pub fn parse_args(args: &[~str]) -> Result<ParseResult, int> {
 };
 
 let cmd_opt = args.iter().filter_map( |s| from_str(s.clone())).next();
-let command = match(cmd_opt){
+let command = match cmd_opt {
 None => {
 debug!("No legal command. Returning 0");
 usage::general();
@@ -194,4 +194,3 @@ pub fn parse_args(args: &[~str]) -> Result<ParseResult, int> {
 sysroot: supplied_sysroot
 })
 }
-
@@ -11,7 +11,7 @@
 #[bench]
 pub fn g() {
 let mut x = 0;
-while(x < 1000) {
+while x < 1000 {
 x += 1;
 }
 }
@@ -263,7 +263,7 @@ impl Streaming for SipState {
 compress!(v0, v1, v2, v3);
 compress!(v0, v1, v2, v3);
 
-return (v0 ^ v1 ^ v2 ^ v3);
+return v0 ^ v1 ^ v2 ^ v3;
 }
 
 fn result_bytes(&mut self) -> ~[u8] {
@@ -518,7 +518,7 @@ mod bench {
 let mut sum = 0u64;
 bh.iter(|| {
 let mut i = $start_index;
-while (i < data.len()) {
+while i < data.len() {
 sum += u64_from_be_bytes(data, i, $size);
 i += $stride;
 }
@@ -292,7 +292,7 @@ impl<T> Option<T> {
 #[inline(always)]
 pub fn filtered(self, f: |t: &T| -> bool) -> Option<T> {
 match self {
-Some(x) => if(f(&x)) {Some(x)} else {None},
+Some(x) => if f(&x) {Some(x)} else {None},
 None => None
 }
 }
@@ -605,7 +605,7 @@ mod tests {
 let mut i = 0;
 Some(10).while_some(|j| {
 i += 1;
-if (j > 0) {
+if j > 0 {
 Some(j-1)
 } else {
 None
@@ -103,9 +103,9 @@ pub mod win32 {
 let k = f(buf.as_mut_ptr(), TMPBUF_SZ as DWORD);
 if k == (0 as DWORD) {
 done = true;
-} else if (k == n &&
+} else if k == n &&
 libc::GetLastError() ==
-libc::ERROR_INSUFFICIENT_BUFFER as DWORD) {
+libc::ERROR_INSUFFICIENT_BUFFER as DWORD {
 n *= (2 as DWORD);
 } else {
 done = true;
@@ -159,7 +159,7 @@ pub fn env() -> ~[(~str,~str)] {
 FreeEnvironmentStringsA
 };
 let ch = GetEnvironmentStringsA();
-if (ch as uint == 0) {
+if ch as uint == 0 {
 fail!("os::env() failure getting env string from OS: {}",
 os::last_os_error());
 }
@@ -176,7 +176,7 @@ pub fn env() -> ~[(~str,~str)] {
 fn rust_env_pairs() -> **libc::c_char;
 }
 let environ = rust_env_pairs();
-if (environ as uint == 0) {
+if environ as uint == 0 {
 fail!("os::env() failure getting env string from OS: {}",
 os::last_os_error());
 }
@@ -201,7 +201,7 @@ pub fn to_mut_unsafe_ptr<T>(thing: &mut T) -> *mut T {
 */
 pub unsafe fn array_each_with_len<T>(arr: **T, len: uint, cb: |*T|) {
 debug!("array_each_with_len: before iterate");
-if (arr as uint == 0) {
+if arr as uint == 0 {
 fail!("ptr::array_each_with_len failure: arr input is null pointer");
 }
 //let start_ptr = *arr;
@@ -222,7 +222,7 @@ pub unsafe fn array_each_with_len<T>(arr: **T, len: uint, cb: |*T|) {
 Dragons be here.
 */
 pub unsafe fn array_each<T>(arr: **T, cb: |*T|) {
-if (arr as uint == 0) {
+if arr as uint == 0 {
 fail!("ptr::array_each_with_len failure: arr input is null pointer");
 }
 let len = buf_len(arr);
@@ -861,7 +861,7 @@ pub fn is_utf8(v: &[u8]) -> bool {
 pub fn is_utf16(v: &[u16]) -> bool {
 let len = v.len();
 let mut i = 0u;
-while (i < len) {
+while i < len {
 let u = v[i];
 
 if u <= 0xD7FF_u16 || u >= 0xE000_u16 {
@@ -887,7 +887,7 @@ pub fn is_utf16(v: &[u16]) -> bool {
 pub fn utf16_chars(v: &[u16], f: |char|) {
 let len = v.len();
 let mut i = 0u;
-while (i < len && v[i] != 0u16) {
+while i < len && v[i] != 0u16 {
 let u = v[i];
 
 if u <= 0xD7FF_u16 || u >= 0xE000_u16 {
@@ -2326,7 +2326,7 @@ impl<'a> StrSlice<'a> for &'a str {
 
 #[inline]
 fn char_range_at(&self, i: uint) -> CharRange {
-if (self[i] < 128u8) {
+if self[i] < 128u8 {
 return CharRange {ch: self[i] as char, next: i + 1 };
 }
 
@@ -101,7 +101,7 @@ impl<T: Send> State<T> {
 } else {
 pos = enqueue_pos;
 }
-} else if (diff < 0) {
+} else if diff < 0 {
 return false
 } else {
 pos = self.enqueue_pos.load(Relaxed);
@@ -481,7 +481,7 @@ fn test_spawn_sched() {
 fn f(i: int, ch: SharedChan<()>) {
 let ch = ch.clone();
 do spawn {
-if (i == 0) {
+if i == 0 {
 ch.send(());
 } else {
 f(i - 1, ch);
@@ -839,7 +839,7 @@ mod test_map {
 let mut ub = map.upper_bound(i);
 let next_key = i - i % step + step;
 let next_pair = (next_key, &value);
-if (i % step == 0) {
+if i % step == 0 {
 assert_eq!(lb.next(), Some((i, &value)));
 } else {
 assert_eq!(lb.next(), Some(next_pair));
@@ -46,7 +46,7 @@ impl Ident {
 
 impl Eq for Ident {
 fn eq(&self, other: &Ident) -> bool {
-if (self.ctxt == other.ctxt) {
+if self.ctxt == other.ctxt {
 self.name == other.name
 } else {
 // IF YOU SEE ONE OF THESE FAILS: it means that you're comparing
@@ -829,9 +829,9 @@ pub fn resolve_internal(id : Ident,
 resolve_internal(Ident{name:name,ctxt:ctxt},table,resolve_table);
 let resolvedthis =
 resolve_internal(Ident{name:id.name,ctxt:subctxt},table,resolve_table);
-if ((resolvedthis == resolvedfrom)
+if (resolvedthis == resolvedfrom)
 && (marksof(ctxt,resolvedthis,table)
-== marksof(subctxt,resolvedthis,table))) {
+== marksof(subctxt,resolvedthis,table)) {
 toname
 } else {
 resolvedthis
@@ -878,7 +878,7 @@ pub fn marksof(ctxt: SyntaxContext, stopname: Name, table: &SCTable) -> ~[Mrk] {
 Rename(_,name,tl) => {
 // see MTWT for details on the purpose of the stopname.
 // short version: it prevents duplication of effort.
-if (name == stopname) {
+if name == stopname {
 return result;
 } else {
 loopvar = tl;
@@ -903,7 +903,7 @@ pub fn mtwt_outer_mark(ctxt: SyntaxContext) -> Mrk {
 /// Push a name... unless it matches the one on top, in which
 /// case pop and discard (so two of the same marks cancel)
 pub fn xorPush(marks: &mut ~[Mrk], mark: Mrk) {
-if ((marks.len() > 0) && (getLast(marks) == mark)) {
+if (marks.len() > 0) && (getLast(marks) == mark) {
 marks.pop();
 } else {
 marks.push(mark);
@@ -927,7 +927,7 @@ pub fn path_name_eq(a : &ast::Path, b : &ast::Path) -> bool {
 
 // are two arrays of segments equal when compared unhygienically?
 pub fn segments_name_eq(a : &[ast::PathSegment], b : &[ast::PathSegment]) -> bool {
-if (a.len() != b.len()) {
+if a.len() != b.len() {
 false
 } else {
 for (idx,seg) in a.iter().enumerate() {
@@ -387,7 +387,7 @@ impl CodeMap {
 a = m;
 }
 }
-if (a >= len) {
+if a >= len {
 fail!("position {} does not resolve to a source location", pos.to_uint())
 }
 
@@ -46,7 +46,7 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr {
 // in this file.
 // Token-tree macros:
 MacInvocTT(ref pth, ref tts, ctxt) => {
-if (pth.segments.len() > 1u) {
+if pth.segments.len() > 1u {
 fld.cx.span_err(
 pth.span,
 format!("expected macro name without module \
@@ -464,7 +464,7 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> {
 }
 _ => return expand_non_macro_stmt(s, fld)
 };
-if (pth.segments.len() > 1u) {
+if pth.segments.len() > 1u {
 fld.cx.span_err(pth.span, "expected macro name without module separators");
 return SmallVector::zero();
 }
@@ -333,7 +333,7 @@ pub fn parse(sess: @ParseSess,
 MatchTok(ref t) => {
 let mut ei_t = ei.clone();
 //if (token_name_eq(t,&tok)) {
-if (token::mtwt_token_eq(t,&tok)) {
+if token::mtwt_token_eq(t,&tok) {
 ei_t.idx += 1;
 next_eis.push(ei_t);
 }
@@ -370,12 +370,12 @@ pub fn parse(sess: @ParseSess,
 "local ambiguity: multiple parsing options: \
 built-in NTs {} or {} other options.",
 nts, next_eis.len()));
-} else if (bb_eis.len() == 0u && next_eis.len() == 0u) {
+} else if bb_eis.len() == 0u && next_eis.len() == 0u {
 return Failure(sp, format!("no rules expected the token `{}`",
 to_str(get_ident_interner(), &tok)));
-} else if (next_eis.len() > 0u) {
+} else if next_eis.len() > 0u {
 /* Now process the next token */
-while(next_eis.len() > 0u) {
+while next_eis.len() > 0u {
 cur_eis.push(next_eis.pop());
 }
 rdr.next_token();
@@ -135,7 +135,7 @@ fn generic_extension(cx: &ExtCtxt,
 let rhs = match *rhses[i] {
 // okay, what's your transcriber?
 MatchedNonterminal(NtTT(tt)) => {
-match (*tt) {
+match *tt {
 // cut off delimiters; don't parse 'em
 TTDelim(ref tts) => {
 (*tts).slice(1u,(*tts).len()-1u).to_owned()
@@ -198,7 +198,7 @@ fn fatal_span_verbose(rdr: @StringReader,
 // EFFECT: advance peek_tok and peek_span to refer to the next token.
 // EFFECT: update the interner, maybe.
 fn string_advance_token(r: @StringReader) {
-match (consume_whitespace_and_comments(r)) {
+match consume_whitespace_and_comments(r) {
 Some(comment) => {
 r.peek_span.set(comment.sp);
 r.peek_tok.set(comment.tok);
@@ -3393,7 +3393,7 @@ impl Parser {
 
 let mut attributes_box = attrs_remaining;
 
-while (self.token != token::RBRACE) {
+while self.token != token::RBRACE {
 // parsing items even when they're not allowed lets us give
 // better error messages and recover more gracefully.
 attributes_box.push_all(self.parse_outer_attributes());
@@ -4373,7 +4373,7 @@ impl Parser {
 items: _,
 foreign_items: foreign_items
 } = self.parse_foreign_items(first_item_attrs, true);
-if (! attrs_remaining.is_empty()) {
+if ! attrs_remaining.is_empty() {
 self.span_err(self.last_span,
 "expected item after attributes");
 }
@@ -4553,7 +4553,7 @@ impl Parser {
 if !self.eat(&token::COMMA) { break; }
 }
 self.expect(&token::RBRACE);
-if (have_disr && !all_nullary) {
+if have_disr && !all_nullary {
 self.fatal("discriminator values can only be used with a c-like \
 enum");
 }
@@ -218,7 +218,7 @@ pub fn to_str(input: @IdentInterner, t: &Token) -> ~str {
 &NtAttr(e) => ::print::pprust::attribute_to_str(e, input),
 _ => {
 ~"an interpolated " +
-match (*nt) {
+match *nt {
 NtItem(..) => ~"item",
 NtBlock(..) => ~"block",
 NtStmt(..) => ~"statement",
@@ -774,7 +774,7 @@ pub fn print_tt(s: &mut State, tt: &ast::TokenTree) {
 word(&mut s.s, "$(");
 for tt_elt in (*tts).iter() { print_tt(s, tt_elt); }
 word(&mut s.s, ")");
-match (*sep) {
+match *sep {
 Some(ref tk) => word(&mut s.s, parse::token::to_str(s.intr, tk)),
 None => ()
 }
@@ -140,7 +140,7 @@ pub fn matches_codepattern(a : &str, b : &str) -> bool {
 fn scan_for_non_ws_or_end(a : &str, idx: uint) -> uint {
 let mut i = idx;
 let len = a.len();
-while ((i < len) && (is_whitespace(a.char_at(i)))) {
+while (i < len) && (is_whitespace(a.char_at(i))) {
 i += 1;
 }
 i
@@ -34,7 +34,7 @@ struct CreatureInfo {
 }
 
 fn show_color(cc: color) -> ~str {
-match (cc) {
+match cc {
 Red => {~"red"}
 Yellow => {~"yellow"}
 Blue => {~"blue"}
@@ -51,7 +51,7 @@ fn show_color_list(set: ~[color]) -> ~str {
 }
 
 fn show_digit(nn: uint) -> ~str {
-match (nn) {
+match nn {
 0 => {~"zero"}
 1 => {~"one"}
 2 => {~"two"}
@@ -37,7 +37,7 @@ fn start(n_tasks: int, token: int) {
 }
 
 fn roundtrip(id: int, n_tasks: int, p: &Port<int>, ch: &Chan<int>) {
-while (true) {
+loop {
 match p.recv() {
 1 => {
 println!("{}\n", id);
@@ -110,7 +110,7 @@ impl Sudoku {
 
 let mut ptr = 0u;
 let end = work.len();
-while (ptr < end) {
+while ptr < end {
 let (row, col) = work[ptr];
 // is there another color to try?
 if self.next_color(row, col, self.grid[row][col] + (1 as u8)) {