
auto merge of #19242 : jakub-/rust/roll-up, r=jakub-

bors 2014-11-23 20:26:58 +00:00
commit 4e5259503c
56 changed files with 578 additions and 550 deletions

configure
View file

@@ -546,22 +546,26 @@ CFG_TARGET=$(to_llvm_triple $CFG_TARGET)
 # there's no rpath. This is where the build system itself puts libraries;
 # --libdir is used to configure the installation directory.
 # FIXME: This needs to parameterized over target triples. Do it in platform.mk
-CFG_LIBDIR_RELATIVE=lib
 if [ "$CFG_OSTYPE" = "pc-windows-gnu" ]
 then
     CFG_LIBDIR_RELATIVE=bin
+    CFG_LIBDIR="${CFG_PREFIX}/${CFG_LIBDIR_RELATIVE}"
 else
-    valopt libdir "${CFG_PREFIX}/${CFG_LIBDIR_RELATIVE}" "install libraries (ignored on windows platform)"
+    CFG_LIBDIR_RELATIVE=lib
+    valopt libdir "${CFG_PREFIX}/${CFG_LIBDIR_RELATIVE}" "install libraries (do not set it on windows platform)"
 fi

 case "$CFG_LIBDIR" in
     "$CFG_PREFIX"/*) CAT_INC=2;;
     "$CFG_PREFIX"*)  CAT_INC=1;;
     *)
         err "libdir must begin with the prefix. Use --prefix to set it accordingly.";;
 esac

 CFG_LIBDIR_RELATIVE=`echo ${CFG_LIBDIR} | cut -c$((${#CFG_PREFIX}+${CAT_INC}))-`
+if [ "$CFG_OSTYPE" = "pc-windows-gnu" ] && [ "$CFG_LIBDIR_RELATIVE" != "bin" ]; then
+    err "libdir on windows should be set to 'bin'"
+fi

 if [ $HELP -eq 1 ]
@@ -711,11 +715,6 @@ then
 fi

     step_msg "using rustc at: ${CFG_LOCAL_RUST_ROOT} with version: $LRV"
     putvar CFG_LOCAL_RUST_ROOT
-else
-    if [ ! -z "$CFG_LOCAL_RUST_ROOT" ]
-    then
-        warn "Use of --local-rust-root without --enable-local-rust"
-    fi
 fi

 # Force freebsd to build with clang; gcc doesn't like us there

View file

@@ -190,11 +190,14 @@ endif
 # Target-and-rule "utility variables"
 ######################################################################

-define DEF_X
+define DEF_FOR_TARGET
 X_$(1) := $(CFG_EXE_SUFFIX_$(1))
+ifndef CFG_LLVM_TARGET_$(1)
+CFG_LLVM_TARGET_$(1) := $(1)
+endif
 endef

 $(foreach target,$(CFG_TARGET), \
-  $(eval $(call DEF_X,$(target))))
+  $(eval $(call DEF_FOR_TARGET,$(target))))

 # "Source" files we generate in builddir along the way.
 GENERATED :=

View file

@@ -75,7 +75,7 @@ $$(RT_OUTPUT_DIR_$(1))/%.o: $(S)src/rt/%.ll $$(MKFILE_DEPS) \
 	@mkdir -p $$(@D)
 	@$$(call E, compile: $$@)
 	$$(Q)$$(LLC_$$(CFG_BUILD)) $$(CFG_LLC_FLAGS_$(1)) \
-	    -filetype=obj -mtriple=$(1) -relocation-model=pic -o $$@ $$<
+	    -filetype=obj -mtriple=$$(CFG_LLVM_TARGET_$(1)) -relocation-model=pic -o $$@ $$<

 $$(RT_OUTPUT_DIR_$(1))/%.o: $(S)src/rt/%.c $$(MKFILE_DEPS)
 	@mkdir -p $$(@D)

View file

@@ -22,7 +22,7 @@ ifdef CFG_ENABLE_LOCAL_RUST
 else
 	$(Q)$(CFG_PYTHON) $(S)src/etc/get-snapshot.py $(CFG_BUILD) $(SNAPSHOT_FILE)
 endif
-	$(Q)touch $@
+	$(Q)if [ -e "$@" ]; then touch "$@"; else echo "ERROR: snapshot $@ not found"; exit 1; fi

 # For other targets, let the host build the target:

View file

@@ -116,7 +116,7 @@ $$(TBIN$(1)_T_$(2)_H_$(3))/$(4)$$(X_$(2)): \
 		$$(foreach dep,$$(TOOL_DEPS_$(4)), \
 		    $$(TLIB$(1)_T_$(2)_H_$(3))/stamp.$$(dep)) \
 		$$(TSREQ$(1)_T_$(2)_H_$(3)) \
-		| $$(TBIN$(1)_T_$(4)_H_$(3))/
+		| $$(TBIN$(1)_T_$(2)_H_$(3))/
 	@$$(call E, rustc: $$@)
 	$$(STAGE$(1)_T_$(2)_H_$(3)) -o $$@ $$< --cfg $(4)

View file

@@ -8,6 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

+use std::ascii::AsciiExt;
 use std::io::{BufferedReader, File};
 use regex::Regex;
@@ -31,7 +32,7 @@ pub fn load_errors(re: &Regex, testfile: &Path) -> Vec<ExpectedError> {
 fn parse_expected(line_num: uint, line: &str, re: &Regex) -> Option<ExpectedError> {
     re.captures(line).and_then(|caps| {
         let adjusts = caps.name("adjusts").len();
-        let kind = caps.name("kind").to_ascii().to_lowercase().into_string();
+        let kind = caps.name("kind").to_ascii_lower();
         let msg = caps.name("msg").trim().to_string();
         debug!("line={} kind={} msg={}", line_num, kind, msg);

View file

@@ -7,7 +7,7 @@
 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-#[cfg(not(stage0))]
 use self::TargetLocation::*;

 use common::Config;
@@ -990,7 +990,7 @@ fn check_expected_errors(expected_errors: Vec<errors::ExpectedError> ,
     let i = s.chars();
     let c : Vec<char> = i.map( |c| {
         if c.is_ascii() {
-            c.to_ascii().to_lowercase().to_char()
+            c.to_ascii().to_lowercase().as_char()
         } else {
             c
         }
@@ -1161,7 +1161,7 @@ fn compile_test_(config: &Config, props: &TestProps,
     let args = make_compile_args(config,
                                  props,
                                  link_args,
-                                 |a, b| ThisFile(make_exe_name(a, b)), testfile);
+                                 |a, b| TargetLocation::ThisFile(make_exe_name(a, b)), testfile);
     compose_and_run_compiler(config, props, testfile, args, None)
 }
@@ -1219,7 +1219,7 @@ fn compose_and_run_compiler(
                                          crate_type,
                                          |a,b| {
                                              let f = make_lib_name(a, b, testfile);
-                                             ThisDirectory(f.dir_path())
+                                             TargetLocation::ThisDirectory(f.dir_path())
                                          },
                                          &abs_ab);
     let auxres = compose_and_run(config,
@@ -1296,11 +1296,11 @@ fn make_compile_args(config: &Config,
         args.push("prefer-dynamic".to_string());
     }
     let path = match xform_file {
-        ThisFile(path) => {
+        TargetLocation::ThisFile(path) => {
             args.push("-o".to_string());
             path
         }
-        ThisDirectory(path) => {
+        TargetLocation::ThisDirectory(path) => {
             args.push("--out-dir".to_string());
             path
         }
@@ -1672,7 +1672,8 @@ fn compile_test_and_save_bitcode(config: &Config, props: &TestProps,
     let args = make_compile_args(config,
                                  props,
                                  link_args,
-                                 |a, b| ThisDirectory(output_base_name(a, b).dir_path()),
+                                 |a, b| TargetLocation::ThisDirectory(
+                                     output_base_name(a, b).dir_path()),
                                  testfile);
     compose_and_run_compiler(config, props, testfile, args, None)
 }

View file

@@ -8,8 +8,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-#![no_start]
-
 #[cfg(rustdoc)]
 extern crate "rustdoc" as this;

View file

@@ -43,8 +43,6 @@ def print_struct_val(val, internal_dict):
     return print_struct_val_starting_from(0, val, internal_dict)


 def print_vec_slice_val(val, internal_dict):
-    output = "&["
-
     length = val.GetChildAtIndex(1).GetValueAsUnsigned()

     data_ptr_val = val.GetChildAtIndex(0)
@@ -56,16 +54,12 @@ def print_vec_slice_val(val, internal_dict):

     start_address = data_ptr_val.GetValueAsUnsigned()

-    for i in range(length):
+    def render_element(i):
         address = start_address + i * element_type_size
-        element_val = val.CreateValueFromAddress( val.GetName() + ("[%s]" % i), address, element_type )
-        output += print_val(element_val, internal_dict)
-        if i != length - 1:
-            output += ", "
+        element_val = val.CreateValueFromAddress( val.GetName() + ("[%s]" % i), address, element_type)
+        return print_val(element_val, internal_dict)

-    output += "]"
-    return output
+    return "&[%s]" % (', '.join([render_element(i) for i in range(length)]))


 def print_struct_val_starting_from(field_start_index, val, internal_dict):
     '''
@@ -77,39 +71,33 @@ def print_struct_val_starting_from(field_start_index, val, internal_dict):
     t = val.GetType()
     has_field_names = type_has_field_names(t)
     type_name = extract_type_name(t.GetName())
-    output = ""
-
-    if not type_name.startswith("("):
-        # this is a tuple, so don't print the type name
-        output += type_name

     if has_field_names:
-        output += " { \n"
+        template = "%(type_name)s {\n%(body)s\n}"
+        separator = ", \n"
     else:
-        output += "("
+        template = "%(type_name)s(%(body)s)"
+        separator = ", "
+
+    if type_name.startswith("("):
+        # this is a tuple, so don't print the type name
+        type_name = ""

     num_children = val.num_children

-    for child_index in range(field_start_index, num_children):
+    def render_child(child_index):
+        this = ""
         if has_field_names:
             field_name = t.GetFieldAtIndex(child_index).GetName()
-            output += field_name + ": "
+            this += field_name + ": "
         field_val = val.GetChildAtIndex(child_index)
-        output += print_val(field_val, internal_dict)
-        if child_index != num_children - 1:
-            output += ", "
-
-    if has_field_names:
-        output += "\n"
+        return this + print_val(field_val, internal_dict)

-    if has_field_names:
-        output += "}"
-    else:
-        output += ")"
-
-    return output
+    body = separator.join([render_child(idx) for idx in range(field_start_index, num_children)])
+
+    return template % {"type_name": type_name,
+                       "body": body}


 def print_enum_val(val, internal_dict):
@@ -243,3 +231,5 @@ def is_vec_slice(val):
     type_name = extract_type_name(ty.GetName()).replace("&'static", "&").replace(" ", "")
     return type_name.startswith("&[") and type_name.endswith("]")
+
+# vi: sw=2:ts=2

View file

@@ -119,6 +119,16 @@ impl<T> Arc<T> {
     }
 }

+/// Get the number of weak references to this value.
+#[inline]
+#[experimental]
+pub fn weak_count<T>(this: &Arc<T>) -> uint { this.inner().weak.load(atomic::SeqCst) - 1 }
+
+/// Get the number of strong references to this value.
+#[inline]
+#[experimental]
+pub fn strong_count<T>(this: &Arc<T>) -> uint { this.inner().strong.load(atomic::SeqCst) }
+
 #[unstable = "waiting on stability of Clone"]
 impl<T> Clone for Arc<T> {
     /// Duplicate an atomically reference counted wrapper.
@@ -321,7 +331,7 @@ mod tests {
     use std::sync::atomic;
     use std::task;
     use std::vec::Vec;
-    use super::{Arc, Weak};
+    use super::{Arc, Weak, weak_count, strong_count};
     use std::sync::Mutex;

     struct Canary(*mut atomic::AtomicUint);
@@ -465,6 +475,49 @@ mod tests {
         drop(arc_weak);
     }

+    #[test]
+    fn test_strong_count() {
+        let a = Arc::new(0u32);
+        assert!(strong_count(&a) == 1);
+        let w = a.downgrade();
+        assert!(strong_count(&a) == 1);
+        let b = w.upgrade().expect("");
+        assert!(strong_count(&b) == 2);
+        assert!(strong_count(&a) == 2);
+        drop(w);
+        drop(a);
+        assert!(strong_count(&b) == 1);
+        let c = b.clone();
+        assert!(strong_count(&b) == 2);
+        assert!(strong_count(&c) == 2);
+    }
+
+    #[test]
+    fn test_weak_count() {
+        let a = Arc::new(0u32);
+        assert!(strong_count(&a) == 1);
+        assert!(weak_count(&a) == 0);
+        let w = a.downgrade();
+        assert!(strong_count(&a) == 1);
+        assert!(weak_count(&a) == 1);
+        let x = w.clone();
+        assert!(weak_count(&a) == 2);
+        drop(w);
+        drop(x);
+        assert!(strong_count(&a) == 1);
+        assert!(weak_count(&a) == 0);
+        let c = a.clone();
+        assert!(strong_count(&a) == 2);
+        assert!(weak_count(&a) == 0);
+        let d = c.downgrade();
+        assert!(weak_count(&c) == 1);
+        assert!(strong_count(&c) == 2);
+        drop(a);
+        drop(c);
+        drop(d);
+    }
+
     #[test]
     fn show_arc() {
         let a = Arc::new(5u32);
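For reference, a compressed view of how the two new counters move. This is a sketch in the style of the in-crate tests above (so the `super::` import assumes the same test-module context), not a statement about the eventual public re-export path.

```rust
use super::{Arc, weak_count, strong_count};

fn counter_walkthrough() {
    let a = Arc::new(5u32);            // strong = 1, weak = 0
    let w = a.downgrade();             // strong = 1, weak = 1
    let b = a.clone();                 // strong = 2, weak = 1
    assert!(strong_count(&b) == 2);
    assert!(weak_count(&b) == 1);
    drop(w);                           // the only weak handle goes away
    assert!(weak_count(&a) == 0);
    drop(a);                           // one strong handle remains
    assert!(strong_count(&b) == 1);
}
```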

View file

@@ -15,7 +15,6 @@ use core::clone::Clone;
 use core::cmp::{PartialEq, PartialOrd, Eq, Ord, Ordering};
 use core::default::Default;
 use core::fmt;
-use core::intrinsics;
 use core::kinds::Sized;
 use core::mem;
 use core::option::Option;
@@ -104,17 +103,14 @@ pub trait BoxAny {
 }

 #[stable]
-impl BoxAny for Box<Any+'static> {
+impl BoxAny for Box<Any> {
     #[inline]
-    fn downcast<T: 'static>(self) -> Result<Box<T>, Box<Any+'static>> {
+    fn downcast<T: 'static>(self) -> Result<Box<T>, Box<Any>> {
         if self.is::<T>() {
             unsafe {
                 // Get the raw representation of the trait object
                 let to: TraitObject =
-                    *mem::transmute::<&Box<Any>, &TraitObject>(&self);
-
-                // Prevent destructor on self being run
-                intrinsics::forget(self);
+                    mem::transmute::<Box<Any>, TraitObject>(self);

                 // Extract the data pointer
                 Ok(mem::transmute(to.data))
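The hunk above transmutes the `Box<Any>` by value instead of copying a `TraitObject` out of a borrow, so ownership moves into the raw parts and the explicit `intrinsics::forget` is no longer needed. A minimal caller-side sketch in 2014-era Rust (the `std::boxed::BoxAny` import path is an assumption about the re-exports of the time):

```rust
use std::any::Any;
use std::boxed::BoxAny;   // assumed path for the trait shown above

fn describe(value: Box<Any>) -> String {
    // `downcast` consumes the box outright: on success we own a Box<int>,
    // on failure the original Box<Any> comes back untouched in the Err variant.
    match value.downcast::<int>() {
        Ok(n) => format!("an int: {}", *n),
        Err(_original) => "something that is not an int".to_string(),
    }
}
```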

View file

@@ -213,6 +213,16 @@ impl<T> Rc<T> {
     }
 }

+/// Get the number of weak references to this value.
+#[inline]
+#[experimental]
+pub fn weak_count<T>(this: &Rc<T>) -> uint { this.weak() - 1 }
+
+/// Get the number of strong references to this value.
+#[inline]
+#[experimental]
+pub fn strong_count<T>(this: &Rc<T>) -> uint { this.strong() }
+
 /// Returns true if the `Rc` currently has unique ownership.
 ///
 /// Unique ownership means that there are no other `Rc` or `Weak` values
@@ -220,8 +230,7 @@ impl<T> Rc<T> {
 #[inline]
 #[experimental]
 pub fn is_unique<T>(rc: &Rc<T>) -> bool {
-    // note that we hold both a strong and a weak reference
-    rc.strong() == 1 && rc.weak() == 1
+    weak_count(rc) == 0 && strong_count(rc) == 1
 }

 /// Unwraps the contained value if the `Rc` has unique ownership.
@@ -489,7 +498,7 @@ impl<T> RcBoxPtr<T> for Weak<T> {
 #[cfg(test)]
 #[allow(experimental)]
 mod tests {
-    use super::{Rc, Weak};
+    use super::{Rc, Weak, weak_count, strong_count};
     use std::cell::RefCell;
     use std::option::{Option, Some, None};
     use std::result::{Err, Ok};
@@ -566,6 +575,40 @@ mod tests {
         assert!(super::is_unique(&x));
     }

+    #[test]
+    fn test_strong_count() {
+        let a = Rc::new(0u32);
+        assert!(strong_count(&a) == 1);
+        let w = a.downgrade();
+        assert!(strong_count(&a) == 1);
+        let b = w.upgrade().expect("upgrade of live rc failed");
+        assert!(strong_count(&b) == 2);
+        assert!(strong_count(&a) == 2);
+        drop(w);
+        drop(a);
+        assert!(strong_count(&b) == 1);
+        let c = b.clone();
+        assert!(strong_count(&b) == 2);
+        assert!(strong_count(&c) == 2);
+    }
+
+    #[test]
+    fn test_weak_count() {
+        let a = Rc::new(0u32);
+        assert!(strong_count(&a) == 1);
+        assert!(weak_count(&a) == 0);
+        let w = a.downgrade();
+        assert!(strong_count(&a) == 1);
+        assert!(weak_count(&a) == 1);
+        drop(w);
+        assert!(strong_count(&a) == 1);
+        assert!(weak_count(&a) == 0);
+        let c = a.clone();
+        assert!(strong_count(&a) == 2);
+        assert!(weak_count(&a) == 0);
+        drop(c);
+    }
+
     #[test]
     fn try_unwrap() {
         let x = Rc::new(3u);

View file

@@ -68,15 +68,15 @@
 //! // dist[node] = current shortest distance from `start` to `node`
 //! let mut dist = Vec::from_elem(adj_list.len(), uint::MAX);
 //!
-//! let mut pq = BinaryHeap::new();
+//! let mut heap = BinaryHeap::new();
 //!
 //! // We're at `start`, with a zero cost
 //! dist[start] = 0u;
-//! pq.push(State { cost: 0u, position: start });
+//! heap.push(State { cost: 0u, position: start });
 //!
 //! // Examine the frontier with lower cost nodes first (min-heap)
 //! loop {
-//!     let State { cost, position } = match pq.pop() {
+//!     let State { cost, position } = match heap.pop() {
 //!         None => break, // empty
 //!         Some(s) => s
 //!     };
@@ -94,7 +94,7 @@
 //!
 //!         // If so, add it to the frontier and continue
 //!         if next.cost < dist[next.position] {
-//!             pq.push(next);
+//!             heap.push(next);
 //!             // Relaxation, we have now found a better way
 //!             dist[next.position] = next.cost;
 //!         }
@@ -184,7 +184,7 @@ impl<T: Ord> BinaryHeap<T> {
     ///
     /// ```
     /// use std::collections::BinaryHeap;
-    /// let pq: BinaryHeap<uint> = BinaryHeap::new();
+    /// let heap: BinaryHeap<uint> = BinaryHeap::new();
     /// ```
     #[unstable = "matches collection reform specification, waiting for dust to settle"]
     pub fn new() -> BinaryHeap<T> { BinaryHeap{data: vec!(),} }
@@ -198,7 +198,7 @@ impl<T: Ord> BinaryHeap<T> {
     ///
     /// ```
     /// use std::collections::BinaryHeap;
-    /// let pq: BinaryHeap<uint> = BinaryHeap::with_capacity(10u);
+    /// let heap: BinaryHeap<uint> = BinaryHeap::with_capacity(10u);
     /// ```
     #[unstable = "matches collection reform specification, waiting for dust to settle"]
     pub fn with_capacity(capacity: uint) -> BinaryHeap<T> {
@@ -212,7 +212,7 @@ impl<T: Ord> BinaryHeap<T> {
     ///
     /// ```
     /// use std::collections::BinaryHeap;
-    /// let pq = BinaryHeap::from_vec(vec![9i, 1, 2, 7, 3, 2]);
+    /// let heap = BinaryHeap::from_vec(vec![9i, 1, 2, 7, 3, 2]);
     /// ```
     pub fn from_vec(xs: Vec<T>) -> BinaryHeap<T> {
         let mut q = BinaryHeap{data: xs,};
@@ -231,10 +231,10 @@ impl<T: Ord> BinaryHeap<T> {
     ///
     /// ```
     /// use std::collections::BinaryHeap;
-    /// let pq = BinaryHeap::from_vec(vec![1i, 2, 3, 4]);
+    /// let heap = BinaryHeap::from_vec(vec![1i, 2, 3, 4]);
     ///
     /// // Print 1, 2, 3, 4 in arbitrary order
-    /// for x in pq.iter() {
+    /// for x in heap.iter() {
     ///     println!("{}", x);
     /// }
     /// ```
@@ -250,13 +250,13 @@ impl<T: Ord> BinaryHeap<T> {
     /// ```
     /// use std::collections::BinaryHeap;
     ///
-    /// let mut pq = BinaryHeap::new();
-    /// assert_eq!(pq.top(), None);
+    /// let mut heap = BinaryHeap::new();
+    /// assert_eq!(heap.top(), None);
     ///
-    /// pq.push(1i);
-    /// pq.push(5i);
-    /// pq.push(2i);
-    /// assert_eq!(pq.top(), Some(&5i));
+    /// heap.push(1i);
+    /// heap.push(5i);
+    /// heap.push(2i);
+    /// assert_eq!(heap.top(), Some(&5i));
     ///
     /// ```
     pub fn top<'a>(&'a self) -> Option<&'a T> {
@@ -270,8 +270,8 @@ impl<T: Ord> BinaryHeap<T> {
     /// ```
     /// use std::collections::BinaryHeap;
     ///
-    /// let pq: BinaryHeap<uint> = BinaryHeap::with_capacity(100u);
-    /// assert!(pq.capacity() >= 100u);
+    /// let heap: BinaryHeap<uint> = BinaryHeap::with_capacity(100u);
+    /// assert!(heap.capacity() >= 100u);
     /// ```
     #[unstable = "matches collection reform specification, waiting for dust to settle"]
     pub fn capacity(&self) -> uint { self.data.capacity() }
@@ -292,9 +292,9 @@ impl<T: Ord> BinaryHeap<T> {
     /// ```
     /// use std::collections::BinaryHeap;
     ///
-    /// let mut pq: BinaryHeap<uint> = BinaryHeap::new();
-    /// pq.reserve_exact(100u);
-    /// assert!(pq.capacity() >= 100u);
+    /// let mut heap: BinaryHeap<uint> = BinaryHeap::new();
+    /// heap.reserve_exact(100u);
+    /// assert!(heap.capacity() >= 100u);
     /// ```
     #[unstable = "matches collection reform specification, waiting for dust to settle"]
     pub fn reserve_exact(&mut self, additional: uint) { self.data.reserve_exact(additional) }
@@ -311,9 +311,9 @@ impl<T: Ord> BinaryHeap<T> {
     /// ```
     /// use std::collections::BinaryHeap;
     ///
-    /// let mut pq: BinaryHeap<uint> = BinaryHeap::new();
-    /// pq.reserve(100u);
-    /// assert!(pq.capacity() >= 100u);
+    /// let mut heap: BinaryHeap<uint> = BinaryHeap::new();
+    /// heap.reserve(100u);
+    /// assert!(heap.capacity() >= 100u);
     /// ```
     #[unstable = "matches collection reform specification, waiting for dust to settle"]
     pub fn reserve(&mut self, additional: uint) {
@@ -334,11 +334,11 @@ impl<T: Ord> BinaryHeap<T> {
     /// ```
     /// use std::collections::BinaryHeap;
     ///
-    /// let mut pq = BinaryHeap::from_vec(vec![1i, 3]);
+    /// let mut heap = BinaryHeap::from_vec(vec![1i, 3]);
     ///
-    /// assert_eq!(pq.pop(), Some(3i));
-    /// assert_eq!(pq.pop(), Some(1i));
-    /// assert_eq!(pq.pop(), None);
+    /// assert_eq!(heap.pop(), Some(3i));
+    /// assert_eq!(heap.pop(), Some(1i));
+    /// assert_eq!(heap.pop(), None);
     /// ```
     #[unstable = "matches collection reform specification, waiting for dust to settle"]
     pub fn pop(&mut self) -> Option<T> {
@@ -361,13 +361,13 @@ impl<T: Ord> BinaryHeap<T> {
     /// ```
     /// use std::collections::BinaryHeap;
     ///
-    /// let mut pq = BinaryHeap::new();
-    /// pq.push(3i);
-    /// pq.push(5i);
-    /// pq.push(1i);
+    /// let mut heap = BinaryHeap::new();
+    /// heap.push(3i);
+    /// heap.push(5i);
+    /// heap.push(1i);
     ///
-    /// assert_eq!(pq.len(), 3);
-    /// assert_eq!(pq.top(), Some(&5i));
+    /// assert_eq!(heap.len(), 3);
+    /// assert_eq!(heap.top(), Some(&5i));
     /// ```
     #[unstable = "matches collection reform specification, waiting for dust to settle"]
     pub fn push(&mut self, item: T) {
@@ -384,14 +384,14 @@ impl<T: Ord> BinaryHeap<T> {
     /// ```
     /// use std::collections::BinaryHeap;
     ///
-    /// let mut pq = BinaryHeap::new();
-    /// pq.push(1i);
-    /// pq.push(5i);
+    /// let mut heap = BinaryHeap::new();
+    /// heap.push(1i);
+    /// heap.push(5i);
     ///
-    /// assert_eq!(pq.push_pop(3i), 5);
-    /// assert_eq!(pq.push_pop(9i), 9);
-    /// assert_eq!(pq.len(), 2);
-    /// assert_eq!(pq.top(), Some(&3i));
+    /// assert_eq!(heap.push_pop(3i), 5);
+    /// assert_eq!(heap.push_pop(9i), 9);
+    /// assert_eq!(heap.len(), 2);
+    /// assert_eq!(heap.top(), Some(&3i));
     /// ```
     pub fn push_pop(&mut self, mut item: T) -> T {
         if !self.is_empty() && *self.top().unwrap() > item {
@@ -410,12 +410,12 @@ impl<T: Ord> BinaryHeap<T> {
     /// ```
     /// use std::collections::BinaryHeap;
     ///
-    /// let mut pq = BinaryHeap::new();
+    /// let mut heap = BinaryHeap::new();
     ///
-    /// assert_eq!(pq.replace(1i), None);
-    /// assert_eq!(pq.replace(3i), Some(1i));
-    /// assert_eq!(pq.len(), 1);
-    /// assert_eq!(pq.top(), Some(&3i));
+    /// assert_eq!(heap.replace(1i), None);
+    /// assert_eq!(heap.replace(3i), Some(1i));
+    /// assert_eq!(heap.len(), 1);
+    /// assert_eq!(heap.top(), Some(&3i));
     /// ```
     pub fn replace(&mut self, mut item: T) -> Option<T> {
         if !self.is_empty() {
@@ -436,8 +436,8 @@ impl<T: Ord> BinaryHeap<T> {
     /// ```
     /// use std::collections::BinaryHeap;
     ///
-    /// let pq = BinaryHeap::from_vec(vec![1i, 2, 3, 4, 5, 6, 7]);
-    /// let vec = pq.into_vec();
+    /// let heap = BinaryHeap::from_vec(vec![1i, 2, 3, 4, 5, 6, 7]);
+    /// let vec = heap.into_vec();
     ///
     /// // Will print in some order
     /// for x in vec.iter() {
@@ -454,11 +454,11 @@ impl<T: Ord> BinaryHeap<T> {
     /// ```
     /// use std::collections::BinaryHeap;
     ///
-    /// let mut pq = BinaryHeap::from_vec(vec![1i, 2, 4, 5, 7]);
-    /// pq.push(6);
-    /// pq.push(3);
+    /// let mut heap = BinaryHeap::from_vec(vec![1i, 2, 4, 5, 7]);
+    /// heap.push(6);
+    /// heap.push(3);
     ///
-    /// let vec = pq.into_sorted_vec();
+    /// let vec = heap.into_sorted_vec();
     /// assert_eq!(vec, vec![1i, 2, 3, 4, 5, 6, 7]);
     /// ```
     pub fn into_sorted_vec(self) -> Vec<T> {
@@ -578,9 +578,9 @@ mod tests {
     fn test_iterator() {
         let data = vec!(5i, 9, 3);
         let iterout = [9i, 5, 3];
-        let pq = BinaryHeap::from_vec(data);
+        let heap = BinaryHeap::from_vec(data);
         let mut i = 0;
-        for el in pq.iter() {
+        for el in heap.iter() {
             assert_eq!(*el, iterout[i]);
             i += 1;
         }

View file

@@ -251,7 +251,7 @@ impl Default for SipHasher {

 /// Hashes a value using the SipHash algorithm.
 #[inline]
-pub fn hash<T: Hash<SipState>>(value: &T) -> u64 {
+pub fn hash<Sized? T: Hash<SipState>>(value: &T) -> u64 {
     let mut state = SipState::new();
     value.hash(&mut state);
     state.result()
@@ -259,7 +259,7 @@ pub fn hash<T: Hash<SipState>>(value: &T) -> u64 {

 /// Hashes a value with the SipHash algorithm with the provided keys.
 #[inline]
-pub fn hash_with_keys<T: Hash<SipState>>(k0: u64, k1: u64, value: &T) -> u64 {
+pub fn hash_with_keys<Sized? T: Hash<SipState>>(k0: u64, k1: u64, value: &T) -> u64 {
     let mut state = SipState::new_with_keys(k0, k1);
     value.hash(&mut state);
     state.result()
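Relaxing the type parameter to `Sized? T` lets these helpers hash dynamically sized values such as `str` directly behind a reference. A minimal sketch in 2014-era Rust; that `std::hash::hash` forwards to this SipHash helper is an assumption about the re-exports of the time:

```rust
use std::hash::hash;

fn main() {
    // T is inferred as the unsized type `str`; before this change the call
    // needed an extra indirection, e.g. hash(&"hello") with T = &str.
    let h: u64 = hash("hello");
    println!("siphash = {}", h);
}
```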

View file

@@ -71,7 +71,7 @@

 #![stable]

-use mem::{transmute, transmute_copy};
+use mem::{transmute};
 use option::{Option, Some, None};
 use raw::TraitObject;
 use intrinsics::TypeId;
@@ -134,7 +134,7 @@ impl<'a> AnyRefExt<'a> for &'a Any {
         if self.is::<T>() {
             unsafe {
                 // Get the raw representation of the trait object
-                let to: TraitObject = transmute_copy(&self);
+                let to: TraitObject = transmute(self);

                 // Extract the data pointer
                 Some(transmute(to.data))
@@ -162,7 +162,7 @@ impl<'a> AnyMutRefExt<'a> for &'a mut Any {
         if self.is::<T>() {
             unsafe {
                 // Get the raw representation of the trait object
-                let to: TraitObject = transmute_copy(&self);
+                let to: TraitObject = transmute(self);

                 // Extract the data pointer
                 Some(transmute(to.data))

View file

@@ -179,7 +179,7 @@ pub trait Octal for Sized? {
     fn fmt(&self, &mut Formatter) -> Result;
 }

-/// Format trait for the `t` character
+/// Format trait for the `b` character
 #[unstable = "I/O and core have yet to be reconciled"]
 pub trait Binary for Sized? {
     /// Formats the value using the given formatter.

View file

@@ -57,7 +57,7 @@
 //!
 //! Pattern matching on `Result`s is clear and straightforward for
 //! simple cases, but `Result` comes with some convenience methods
-//! that make working it more succinct.
+//! that make working with it more succinct.
 //!
 //! ```
 //! let good_result: Result<int, int> = Ok(10);

View file

@@ -37,22 +37,18 @@ use util::ppaux::{ty_to_string};
 use util::nodemap::{FnvHashMap, NodeSet};
 use lint::{Context, LintPass, LintArray};

-use std::cmp;
+use std::{cmp, slice};
 use std::collections::hash_map::{Occupied, Vacant};
 use std::num::SignedInt;
-use std::slice;
 use std::{i8, i16, i32, i64, u8, u16, u32, u64, f32, f64};

-use syntax::abi;
-use syntax::ast_map;
-use syntax::ast_util::is_shift_binop;
-use syntax::attr::AttrMetaMethods;
-use syntax::attr;
+use syntax::{abi, ast, ast_map};
+use syntax::ast_util::{mod, is_shift_binop};
+use syntax::attr::{mod, AttrMetaMethods};
 use syntax::codemap::{Span, DUMMY_SP};
 use syntax::parse::token;
-use syntax::{ast, ast_util, visit};
 use syntax::ast::{TyI, TyU, TyI8, TyU8, TyI16, TyU16, TyI32, TyU32, TyI64, TyU64};
 use syntax::ptr::P;
-use syntax::visit::Visitor;
+use syntax::visit::{mod, Visitor};

 declare_lint!(WHILE_TRUE, Warn,
               "suggest using `loop { }` instead of `while true { }`")
@@ -1112,8 +1108,8 @@ impl UnusedParens {
                     }
                     ast::ExprUnary(_, ref x) |
                     ast::ExprCast(ref x, _) |
-                    ast::ExprField(ref x, _, _) |
-                    ast::ExprTupField(ref x, _, _) |
+                    ast::ExprField(ref x, _) |
+                    ast::ExprTupField(ref x, _) |
                     ast::ExprIndex(ref x, _) => {
                         // &X { y: 1 }, X { y: 1 }.y
                         contains_exterior_struct_lit(&**x)

View file

@@ -475,8 +475,8 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
             ast::ExprCast(ref e, _) |
             ast::ExprUnary(_, ref e) |
             ast::ExprParen(ref e) |
-            ast::ExprField(ref e, _, _) |
-            ast::ExprTupField(ref e, _, _) => {
+            ast::ExprField(ref e, _) |
+            ast::ExprTupField(ref e, _) => {
                 self.straightline(expr, pred, Some(&**e).into_iter())
             }

View file

@@ -15,19 +15,16 @@ pub use self::const_val::*;
 pub use self::constness::*;

 use metadata::csearch;
-use middle::astencode;
-use middle::def;
+use middle::{astencode, def};
 use middle::pat_util::def_to_path;
 use middle::ty::{mod, Ty};
-use middle::typeck::astconv;
-use middle::typeck::check;
-use util::nodemap::{DefIdMap};
+use middle::typeck::{astconv, check};
+use util::nodemap::DefIdMap;

 use syntax::ast::{mod, Expr};
 use syntax::parse::token::InternedString;
 use syntax::ptr::P;
-use syntax::visit::Visitor;
-use syntax::visit;
+use syntax::visit::{mod, Visitor};
 use syntax::{ast_map, ast_util, codemap};

 use std::rc::Rc;
@@ -234,9 +231,9 @@ impl<'a, 'tcx> ConstEvalVisitor<'a, 'tcx> {
             }
         }

-        ast::ExprField(ref base, _, _) => self.classify(&**base),
+        ast::ExprField(ref base, _) => self.classify(&**base),

-        ast::ExprTupField(ref base, _, _) => self.classify(&**base),
+        ast::ExprTupField(ref base, _) => self.classify(&**base),

         ast::ExprIndex(ref base, ref idx) =>
             join(self.classify(&**base), self.classify(&**idx)),

View file

@@ -12,20 +12,14 @@
 // closely. The idea is that all reachable symbols are live, codes called
 // from live codes are live, and everything else is dead.

-use middle::def;
-use middle::pat_util;
-use middle::privacy;
-use middle::ty;
-use middle::typeck;
+use middle::{def, pat_util, privacy, ty, typeck};
 use lint;
 use util::nodemap::NodeSet;

 use std::collections::HashSet;
-use syntax::ast;
-use syntax::ast_map;
+use syntax::{ast, ast_map, codemap};
 use syntax::ast_util::{local_def, is_local, PostExpansionMethod};
 use syntax::attr::{mod, AttrMetaMethods};
-use syntax::codemap;
 use syntax::visit::{mod, Visitor};

 // Any local node that may call something in its body block should be
@@ -277,10 +271,10 @@ impl<'a, 'tcx, 'v> Visitor<'v> for MarkSymbolVisitor<'a, 'tcx> {
             ast::ExprMethodCall(..) => {
                 self.lookup_and_handle_method(expr.id, expr.span);
             }
-            ast::ExprField(ref lhs, ref ident, _) => {
+            ast::ExprField(ref lhs, ref ident) => {
                 self.handle_field_access(&**lhs, &ident.node);
             }
-            ast::ExprTupField(ref lhs, idx, _) => {
+            ast::ExprTupField(ref lhs, idx) => {
                 self.handle_tup_field_access(&**lhs, idx.node);
            }
            _ => ()

View file

@@ -20,11 +20,9 @@ pub use self::ConsumeMode::*;
 pub use self::MoveReason::*;
 use self::OverloadedCallType::*;

+use middle::{def, region, pat_util};
 use middle::mem_categorization as mc;
-use middle::def;
 use middle::mem_categorization::Typer;
-use middle::region;
-use middle::pat_util;
 use middle::ty::{mod, Ty};
 use middle::typeck::{MethodCall, MethodObject, MethodTraitObject};
 use middle::typeck::{MethodOrigin, MethodParam, MethodTypeParam};
@@ -331,11 +329,11 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> {
                 }
             }

-            ast::ExprField(ref base, _, _) => { // base.f
+            ast::ExprField(ref base, _) => { // base.f
                 self.select_from_expr(&**base);
             }

-            ast::ExprTupField(ref base, _, _) => { // base.<n>
+            ast::ExprTupField(ref base, _) => { // base.<n>
                 self.select_from_expr(&**base);
             }

View file

@@ -113,24 +113,19 @@ use self::VarKind::*;

 use middle::def::*;
 use middle::mem_categorization::Typer;
-use middle::pat_util;
-use middle::typeck;
-use middle::ty;
+use middle::{pat_util, typeck, ty};
 use lint;
 use util::nodemap::NodeMap;

-use std::fmt;
-use std::io;
+use std::{fmt, io, uint};
 use std::rc::Rc;
-use std::uint;
 use syntax::ast::{mod, NodeId, Expr};
 use syntax::codemap::{BytePos, original_sp, Span};
-use syntax::parse::token::special_idents;
-use syntax::parse::token;
+use syntax::parse::token::{mod, special_idents};
 use syntax::print::pprust::{expr_to_string, block_to_string};
 use syntax::ptr::P;
-use syntax::{visit, ast_util};
-use syntax::visit::{Visitor, FnKind};
+use syntax::ast_util;
+use syntax::visit::{mod, Visitor, FnKind};

 /// For use with `propagate_through_loop`.
 enum LoopKind<'a> {
@@ -967,11 +962,11 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
             self.access_path(expr, succ, ACC_READ | ACC_USE)
           }

-          ast::ExprField(ref e, _, _) => {
+          ast::ExprField(ref e, _) => {
             self.propagate_through_expr(&**e, succ)
           }

-          ast::ExprTupField(ref e, _, _) => {
+          ast::ExprTupField(ref e, _) => {
             self.propagate_through_expr(&**e, succ)
           }
@@ -1295,8 +1290,8 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
         match expr.node {
           ast::ExprPath(_) => succ,
-          ast::ExprField(ref e, _, _) => self.propagate_through_expr(&**e, succ),
-          ast::ExprTupField(ref e, _, _) => self.propagate_through_expr(&**e, succ),
+          ast::ExprField(ref e, _) => self.propagate_through_expr(&**e, succ),
+          ast::ExprTupField(ref e, _) => self.propagate_through_expr(&**e, succ),
           _ => self.propagate_through_expr(expr, succ)
         }
     }

View file

@@ -477,7 +477,7 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> {
             Ok(self.cat_deref(expr, base_cmt, 0, false))
           }

-          ast::ExprField(ref base, f_name, _) => {
+          ast::ExprField(ref base, f_name) => {
             let base_cmt = if_ok!(self.cat_expr(&**base));
             debug!("cat_expr(cat_field): id={} expr={} base={}",
                    expr.id,
@@ -486,7 +486,7 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> {
             Ok(self.cat_field(expr, base_cmt, f_name.node.name, expr_ty))
           }

-          ast::ExprTupField(ref base, idx, _) => {
+          ast::ExprTupField(ref base, idx) => {
             let base_cmt = if_ok!(self.cat_expr(&**base));
             Ok(self.cat_tup_field(expr, base_cmt, idx.node, expr_ty))
           }

View file

@@ -17,20 +17,17 @@ use self::FieldName::*;
 use std::mem::replace;

 use metadata::csearch;
-use middle::def;
-use middle::resolve;
+use middle::{def, resolve};
 use middle::ty::{mod, Ty};
 use middle::typeck::{MethodCall, MethodMap, MethodOrigin, MethodParam, MethodTypeParam};
 use middle::typeck::{MethodStatic, MethodStaticUnboxedClosure, MethodObject, MethodTraitObject};
 use util::nodemap::{NodeMap, NodeSet};

-use syntax::ast;
-use syntax::ast_map;
+use syntax::{ast, ast_map};
 use syntax::ast_util::{is_local, local_def, PostExpansionMethod};
 use syntax::codemap::Span;
 use syntax::parse::token;
-use syntax::visit;
-use syntax::visit::Visitor;
+use syntax::visit::{mod, Visitor};

 type Context<'a, 'tcx> = (&'a MethodMap<'tcx>, &'a resolve::ExportMap2);
@@ -836,20 +833,14 @@ impl<'a, 'tcx, 'v> Visitor<'v> for PrivacyVisitor<'a, 'tcx> {
     fn visit_expr(&mut self, expr: &ast::Expr) {
         match expr.node {
-            ast::ExprField(ref base, ident, _) => {
-                match ty::expr_ty_adjusted(self.tcx, &**base).sty {
-                    ty::ty_struct(id, _) => {
-                        self.check_field(expr.span, id, NamedField(ident.node));
-                    }
-                    _ => {}
+            ast::ExprField(ref base, ident) => {
+                if let ty::ty_struct(id, _) = ty::expr_ty_adjusted(self.tcx, &**base).sty {
+                    self.check_field(expr.span, id, NamedField(ident.node));
                 }
             }
-            ast::ExprTupField(ref base, idx, _) => {
-                match ty::expr_ty_adjusted(self.tcx, &**base).sty {
-                    ty::ty_struct(id, _) => {
-                        self.check_field(expr.span, id, UnnamedField(idx.node));
-                    }
-                    _ => {}
+            ast::ExprTupField(ref base, idx) => {
+                if let ty::ty_struct(id, _) = ty::expr_ty_adjusted(self.tcx, &**base).sty {
+                    self.check_field(expr.span, id, UnnamedField(idx.node));
                 }
             }
             ast::ExprMethodCall(ident, _, _) => {

View file

@@ -22,8 +22,7 @@ Most of the documentation on regions can be found in

 use session::Session;
-use middle::ty::{FreeRegion};
-use middle::ty::{mod, Ty};
+use middle::ty::{mod, Ty, FreeRegion};
 use util::nodemap::{FnvHashMap, FnvHashSet, NodeMap};
 use util::common::can_reach;
@@ -33,7 +32,6 @@ use syntax::codemap::Span;
 use syntax::{ast, visit};
 use syntax::ast::{Block, Item, FnDecl, NodeId, Arm, Pat, Stmt, Expr, Local};
 use syntax::ast_util::{stmt_id};
-use syntax::ptr::P;
 use syntax::visit::{Visitor, FnKind};

 /// CodeExtent represents a statically-describable extent that can be
@@ -824,11 +822,10 @@ fn resolve_local(visitor: &mut RegionResolutionVisitor, local: &ast::Local) {
         match expr.node {
             ast::ExprAddrOf(_, ref subexpr) |
             ast::ExprUnary(ast::UnDeref, ref subexpr) |
-            ast::ExprField(ref subexpr, _, _) |
-            ast::ExprTupField(ref subexpr, _, _) |
+            ast::ExprField(ref subexpr, _) |
+            ast::ExprTupField(ref subexpr, _) |
             ast::ExprIndex(ref subexpr, _) |
             ast::ExprParen(ref subexpr) => {
-                let subexpr: &'a P<Expr> = subexpr; // FIXME(#11586)
                 expr = &**subexpr;
             }
             _ => {

View file

@@ -71,17 +71,13 @@ use syntax::ast::{Variant, ViewItem, ViewItemExternCrate};
 use syntax::ast::{ViewItemUse, ViewPathGlob, ViewPathList, ViewPathSimple};
 use syntax::ast::{Visibility};
 use syntax::ast;
-use syntax::ast_util::{PostExpansionMethod, local_def, walk_pat};
-use syntax::ast_util;
+use syntax::ast_util::{mod, PostExpansionMethod, local_def, walk_pat};
 use syntax::attr::AttrMetaMethods;
 use syntax::ext::mtwt;
-use syntax::parse::token::special_names;
-use syntax::parse::token::special_idents;
-use syntax::parse::token;
+use syntax::parse::token::{mod, special_names, special_idents};
 use syntax::codemap::{Span, DUMMY_SP, Pos};
 use syntax::owned_slice::OwnedSlice;
-use syntax::visit;
-use syntax::visit::Visitor;
+use syntax::visit::{mod, Visitor};

 use std::collections::{HashMap, HashSet};
 use std::collections::hash_map::{Occupied, Vacant};
@@ -5959,7 +5955,7 @@ impl<'a> Resolver<'a> {
     fn record_candidate_traits_for_expr_if_necessary(&mut self, expr: &Expr) {
         match expr.node {
-            ExprField(_, ident, _) => {
+            ExprField(_, ident) => {
                 // FIXME(#6890): Even though you can't treat a method like a
                 // field, we need to add any trait methods we find that match
                 // the field name so that we can do some nice error reporting

View file

@@ -2638,11 +2638,6 @@ impl ops::Sub<TypeContents,TypeContents> for TypeContents {
 }

 impl fmt::Show for TypeContents {
-    #[cfg(stage0)]
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "TypeContents({:t})", self.bits)
-    }
-    #[cfg(not(stage0))]
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "TypeContents({:b})", self.bits)
     }

View file

@@ -10,16 +10,13 @@

 use super::probe;

-use middle::subst;
-use middle::subst::Subst;
+use middle::subst::{mod, Subst};
 use middle::traits;
 use middle::ty::{mod, Ty};
-use middle::typeck::check;
-use middle::typeck::check::{FnCtxt, NoPreference, PreferMutLvalue};
+use middle::typeck::check::{mod, FnCtxt, NoPreference, PreferMutLvalue};
 use middle::typeck::{MethodCall, MethodCallee, MethodObject, MethodOrigin,
                      MethodParam, MethodStatic, MethodTraitObject, MethodTypeParam};
-use middle::typeck::infer;
-use middle::typeck::infer::InferCtxt;
+use middle::typeck::infer::{mod, InferCtxt};
 use middle::ty_fold::HigherRankedFoldable;
 use syntax::ast;
 use syntax::codemap::Span;
@@ -510,8 +507,8 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> {
             let last = exprs[exprs.len() - 1];
             match last.node {
                 ast::ExprParen(ref expr) |
-                ast::ExprField(ref expr, _, _) |
-                ast::ExprTupField(ref expr, _, _) |
+                ast::ExprField(ref expr, _) |
+                ast::ExprTupField(ref expr, _) |
                 ast::ExprSlice(ref expr, _, _, _) |
                 ast::ExprIndex(ref expr, _) |
                 ast::ExprUnary(ast::UnDeref, ref expr) => exprs.push(&**expr),

View file

@@ -83,62 +83,41 @@ use self::IsBinopAssignment::*;
 use self::TupleArgumentsFlag::*;

 use session::Session;
-use middle::const_eval;
-use middle::def;
+use middle::{const_eval, def, traits};
 use middle::lang_items::IteratorItem;
-use middle::mem_categorization::McResult;
-use middle::mem_categorization;
-use middle::pat_util::pat_id_map;
-use middle::pat_util;
+use middle::mem_categorization::{mod, McResult};
+use middle::pat_util::{mod, pat_id_map};
 use middle::region::CodeExtent;
-use middle::subst;
-use middle::subst::{Subst, Substs, VecPerParamSpace, ParamSpace};
-use middle::traits;
-use middle::ty::{FnSig, VariantInfo};
-use middle::ty::{Polytype};
+use middle::subst::{mod, Subst, Substs, VecPerParamSpace, ParamSpace};
+use middle::ty::{FnSig, VariantInfo, Polytype};
 use middle::ty::{Disr, ParamTy, ParameterEnvironment};
 use middle::ty::{mod, Ty};
 use middle::ty::liberate_late_bound_regions;
 use middle::ty_fold::TypeFolder;
-use middle::typeck::astconv::AstConv;
-use middle::typeck::astconv::{ast_region_to_region, ast_ty_to_ty};
-use middle::typeck::astconv;
+use middle::typeck::astconv::{mod, ast_region_to_region, ast_ty_to_ty, AstConv};
 use middle::typeck::check::_match::pat_ctxt;
-use middle::typeck::CrateCtxt;
-use middle::typeck::infer;
 use middle::typeck::rscope::RegionScope;
-use middle::typeck::{lookup_def_ccx};
-use middle::typeck::no_params;
-use middle::typeck::{require_same_types};
-use middle::typeck::{MethodCall, MethodCallee, MethodMap, ObjectCastMap};
-use middle::typeck::{TypeAndSubsts};
-use middle::typeck;
+use middle::typeck::{mod, CrateCtxt, infer, lookup_def_ccx, no_params, require_same_types};
+use middle::typeck::{MethodCall, MethodCallee, MethodMap, ObjectCastMap, TypeAndSubsts};
 use middle::lang_items::TypeIdLangItem;
 use lint;
 use util::common::{block_query, indenter, loop_query};
-use util::ppaux;
-use util::ppaux::{UserString, Repr};
+use util::ppaux::{mod, UserString, Repr};
 use util::nodemap::{DefIdMap, FnvHashMap, NodeMap};

 use std::cell::{Cell, Ref, RefCell};
 use std::collections::hash_map::{Occupied, Vacant};
 use std::mem::replace;
 use std::rc::Rc;
-use syntax::abi;
-use syntax::ast::{ProvidedMethod, RequiredMethod, TypeTraitItem};
-use syntax::ast;
-use syntax::ast_util::{local_def, PostExpansionMethod};
-use syntax::ast_util;
-use syntax::attr;
-use syntax::codemap::Span;
-use syntax::codemap;
+use syntax::{mod, abi, attr};
+use syntax::ast::{mod, ProvidedMethod, RequiredMethod, TypeTraitItem};
+use syntax::ast_util::{mod, local_def, PostExpansionMethod};
+use syntax::codemap::{mod, Span};
 use syntax::owned_slice::OwnedSlice;
 use syntax::parse::token;
 use syntax::print::pprust;
 use syntax::ptr::P;
-use syntax::visit;
-use syntax::visit::Visitor;
-use syntax;
+use syntax::visit::{mod, Visitor};

 pub mod _match;
 pub mod vtable;
@@ -4405,10 +4384,10 @@ fn check_expr_with_unifier<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
         fcx.require_expr_have_sized_type(expr, traits::StructInitializerSized);
       }
-      ast::ExprField(ref base, ref field, _) => {
+      ast::ExprField(ref base, ref field) => {
         check_field(fcx, expr, lvalue_pref, &**base, field);
       }
-      ast::ExprTupField(ref base, idx, _) => {
+      ast::ExprTupField(ref base, idx) => {
        check_tup_field(fcx, expr, lvalue_pref, &**base, idx);
      }
      ast::ExprIndex(ref base, ref idx) => {

View file

@@ -278,8 +278,8 @@ mod svh_visitor {
             ExprBlock(..) => SawExprBlock,
             ExprAssign(..) => SawExprAssign,
             ExprAssignOp(op, _, _) => SawExprAssignOp(op),
-            ExprField(_, id, _) => SawExprField(content(id.node)),
-            ExprTupField(_, id, _) => SawExprTupField(id.node),
+            ExprField(_, id) => SawExprField(content(id.node)),
+            ExprTupField(_, id) => SawExprTupField(id.node),
             ExprIndex(..) => SawExprIndex,
             ExprSlice(..) => SawExprSlice,
             ExprPath(..) => SawExprPath,

View file

@@ -30,34 +30,26 @@
 use driver::driver::CrateAnalysis;
 use session::Session;

-use middle::def;
+use middle::{def, typeck};
 use middle::ty::{mod, Ty};
-use middle::typeck;

 use std::cell::Cell;
-use std::io;
-use std::io::File;
-use std::io::fs;
+use std::io::{mod, File, fs};
 use std::os;

-use syntax::ast;
-use syntax::ast_util;
-use syntax::ast_util::PostExpansionMethod;
-use syntax::ast::{NodeId,DefId};
+use syntax::ast_util::{mod, PostExpansionMethod};
+use syntax::ast::{mod, NodeId, DefId};
 use syntax::ast_map::NodeItem;
 use syntax::attr;
 use syntax::codemap::*;
-use syntax::parse::token;
-use syntax::parse::token::{get_ident,keywords};
+use syntax::parse::token::{mod, get_ident, keywords};
 use syntax::owned_slice::OwnedSlice;
-use syntax::visit;
-use syntax::visit::Visitor;
+use syntax::visit::{mod, Visitor};
 use syntax::print::pprust::{path_to_string,ty_to_string};
 use syntax::ptr::P;

 use self::span_utils::SpanUtils;
-use self::recorder::Recorder;
-use self::recorder::FmtStrs;
+use self::recorder::{Recorder, FmtStrs};

 use util::ppaux;
@@ -568,13 +560,15 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
             Some(node_id) => node_id,
             None => -1,
         };
+        let val = self.span.snippet(item.span);
         let sub_span = self.span.sub_span_after_keyword(item.span, keywords::Struct);
         self.fmt.struct_str(item.span,
                             sub_span,
                             item.id,
                             ctor_id,
                             qualname.as_slice(),
-                            self.cur_scope);
+                            self.cur_scope,
+                            val.as_slice());

         // fields
         for field in def.fields.iter() {
@@ -589,21 +583,23 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
                     item: &ast::Item,
                     enum_definition: &ast::EnumDef,
                     ty_params: &ast::Generics) {
-        let qualname = self.analysis.ty_cx.map.path_to_string(item.id);
+        let enum_name = self.analysis.ty_cx.map.path_to_string(item.id);
+        let val = self.span.snippet(item.span);
         match self.span.sub_span_after_keyword(item.span, keywords::Enum) {
             Some(sub_span) => self.fmt.enum_str(item.span,
                                                 Some(sub_span),
                                                 item.id,
-                                                qualname.as_slice(),
-                                                self.cur_scope),
+                                                enum_name.as_slice(),
+                                                self.cur_scope,
+                                                val.as_slice()),
             None => self.sess.span_bug(item.span,
                                        format!("Could not find subspan for enum {}",
-                                               qualname).as_slice()),
+                                               enum_name).as_slice()),
         }
         for variant in enum_definition.variants.iter() {
             let name = get_ident(variant.node.name);
             let name = name.get();
-            let mut qualname = qualname.clone();
+            let mut qualname = enum_name.clone();
             qualname.push_str("::");
             qualname.push_str(name);
             let val = self.span.snippet(variant.span);
@@ -615,6 +611,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
                                             variant.node.id,
                                             name,
                                             qualname.as_slice(),
+                                            enum_name.as_slice(),
                                             val.as_slice(),
                                             item.id);
                     for arg in args.iter() {
@@ -632,18 +629,19 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
                                                    variant.node.id,
                                                    ctor_id,
                                                    qualname.as_slice(),
+                                                   enum_name.as_slice(),
                                                    val.as_slice(),
                                                    item.id);

                     for field in struct_def.fields.iter() {
-                        self.process_struct_field_def(field, qualname.as_slice(), variant.node.id);
+                        self.process_struct_field_def(field, enum_name.as_slice(), variant.node.id);
                         self.visit_ty(&*field.node.ty);
                     }
                 }
             }
         }
-        self.process_generic_params(ty_params, item.span, qualname.as_slice(), item.id);
+        self.process_generic_params(ty_params, item.span, enum_name.as_slice(), item.id);
     }

     fn process_impl(&mut self,
@@ -698,13 +696,14 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
                      trait_refs: &OwnedSlice<ast::TyParamBound>,
                      methods: &Vec<ast::TraitItem>) {
         let qualname = self.analysis.ty_cx.map.path_to_string(item.id);
+        let val = self.span.snippet(item.span);
         let sub_span = self.span.sub_span_after_keyword(item.span, keywords::Trait);
         self.fmt.trait_str(item.span,
                            sub_span,
                            item.id,
                            qualname.as_slice(),
-                           self.cur_scope);
+                           self.cur_scope,
+                           val.as_slice());

         // super-traits
         for super_bound in trait_refs.iter() {
@@ -1293,7 +1292,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
             ast::ExprStruct(ref path, ref fields, ref base) =>
                 self.process_struct_lit(ex, path, fields, base),
             ast::ExprMethodCall(_, _, ref args) => self.process_method_call(ex, args),
-            ast::ExprField(ref sub_ex, ident, _) => {
+            ast::ExprField(ref sub_ex, ident) => {
                 if generated_code(sub_ex.span) {
                     return
                 }
@@ -1319,7 +1318,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
                                       "Expected struct type, but not ty_struct"),
                 }
             },
-            ast::ExprTupField(ref sub_ex, idx, _) => {
+            ast::ExprTupField(ref sub_ex, idx) => {
                 if generated_code(sub_ex.span) {
                     return
                 }


@ -106,15 +106,19 @@ impl<'a> FmtStrs<'a> {
Variable => ("variable", Variable => ("variable",
vec!("id","name","qualname","value","type","scopeid"), vec!("id","name","qualname","value","type","scopeid"),
true, true), true, true),
Enum => ("enum", vec!("id","qualname","scopeid"), true, true), Enum => ("enum", vec!("id","qualname","scopeid","value"), true, true),
Variant => ("variant", vec!("id","name","qualname","value","scopeid"), true, true), Variant => ("variant",
vec!("id","name","qualname","type","value","scopeid"),
true, true),
VariantStruct => ("variant_struct", VariantStruct => ("variant_struct",
vec!("id","ctor_id","qualname","value","scopeid"), true, true), vec!("id","ctor_id","qualname","type","value","scopeid"),
Function => ("function", vec!("id","qualname","declid","declidcrate","scopeid"), true, true),
Function => ("function",
vec!("id","qualname","declid","declidcrate","scopeid"),
true, true), true, true),
MethodDecl => ("method_decl", vec!("id","qualname","scopeid"), true, true), MethodDecl => ("method_decl", vec!("id","qualname","scopeid"), true, true),
Struct => ("struct", vec!("id","ctor_id","qualname","scopeid"), true, true), Struct => ("struct", vec!("id","ctor_id","qualname","scopeid","value"), true, true),
Trait => ("trait", vec!("id","qualname","scopeid"), true, true), Trait => ("trait", vec!("id","qualname","scopeid","value"), true, true),
Impl => ("impl", vec!("id","refid","refidcrate","scopeid"), true, true), Impl => ("impl", vec!("id","refid","refidcrate","scopeid"), true, true),
Module => ("module", vec!("id","qualname","scopeid","def_file"), true, false), Module => ("module", vec!("id","qualname","scopeid","def_file"), true, false),
UseAlias => ("use_alias", UseAlias => ("use_alias",
@ -128,7 +132,7 @@ impl<'a> FmtStrs<'a> {
true, false), true, false),
MethodCall => ("method_call", MethodCall => ("method_call",
vec!("refid","refidcrate","declid","declidcrate","scopeid"), vec!("refid","refidcrate","declid","declidcrate","scopeid"),
true, true), true, true),
Typedef => ("typedef", vec!("id","qualname","value"), true, true), Typedef => ("typedef", vec!("id","qualname","value"), true, true),
ExternalCrate => ("external_crate", vec!("name","crate","file_name"), false, false), ExternalCrate => ("external_crate", vec!("name","crate","file_name"), false, false),
Crate => ("crate", vec!("name"), true, false), Crate => ("crate", vec!("name"), true, false),
@ -140,7 +144,7 @@ impl<'a> FmtStrs<'a> {
true, true), true, true),
StructRef => ("struct_ref", StructRef => ("struct_ref",
vec!("refid","refidcrate","qualname","scopeid"), vec!("refid","refidcrate","qualname","scopeid"),
true, true), true, true),
FnRef => ("fn_ref", vec!("refid","refidcrate","qualname","scopeid"), true, true) FnRef => ("fn_ref", vec!("refid","refidcrate","qualname","scopeid"), true, true)
} }
} }
@ -157,6 +161,7 @@ impl<'a> FmtStrs<'a> {
} }
let values = values.iter().map(|s| { let values = values.iter().map(|s| {
// Never take more than 1020 chars
if s.len() > 1020 { if s.len() > 1020 {
s.as_slice().slice_to(1020) s.as_slice().slice_to(1020)
} else { } else {
@ -323,11 +328,12 @@ impl<'a> FmtStrs<'a> {
sub_span: Option<Span>, sub_span: Option<Span>,
id: NodeId, id: NodeId,
name: &str, name: &str,
scope_id: NodeId) { scope_id: NodeId,
value: &str) {
self.check_and_record(Enum, self.check_and_record(Enum,
span, span,
sub_span, sub_span,
svec!(id, name, scope_id)); svec!(id, name, scope_id, value));
} }
pub fn tuple_variant_str(&mut self, pub fn tuple_variant_str(&mut self,
@ -336,12 +342,13 @@ impl<'a> FmtStrs<'a> {
id: NodeId, id: NodeId,
name: &str, name: &str,
qualname: &str, qualname: &str,
typ: &str,
val: &str, val: &str,
scope_id: NodeId) { scope_id: NodeId) {
self.check_and_record(Variant, self.check_and_record(Variant,
span, span,
sub_span, sub_span,
svec!(id, name, qualname, val, scope_id)); svec!(id, name, qualname, typ, val, scope_id));
} }
pub fn struct_variant_str(&mut self, pub fn struct_variant_str(&mut self,
@ -350,12 +357,13 @@ impl<'a> FmtStrs<'a> {
id: NodeId, id: NodeId,
ctor_id: NodeId, ctor_id: NodeId,
name: &str, name: &str,
typ: &str,
val: &str, val: &str,
scope_id: NodeId) { scope_id: NodeId) {
self.check_and_record(VariantStruct, self.check_and_record(VariantStruct,
span, span,
sub_span, sub_span,
svec!(id, ctor_id, name, val, scope_id)); svec!(id, ctor_id, name, typ, val, scope_id));
} }
pub fn fn_str(&mut self, pub fn fn_str(&mut self,
@ -405,11 +413,12 @@ impl<'a> FmtStrs<'a> {
id: NodeId, id: NodeId,
ctor_id: NodeId, ctor_id: NodeId,
name: &str, name: &str,
scope_id: NodeId) { scope_id: NodeId,
value: &str) {
self.check_and_record(Struct, self.check_and_record(Struct,
span, span,
sub_span, sub_span,
svec!(id, ctor_id, name, scope_id)); svec!(id, ctor_id, name, scope_id, value));
} }
pub fn trait_str(&mut self, pub fn trait_str(&mut self,
@ -417,11 +426,12 @@ impl<'a> FmtStrs<'a> {
sub_span: Option<Span>, sub_span: Option<Span>,
id: NodeId, id: NodeId,
name: &str, name: &str,
scope_id: NodeId) { scope_id: NodeId,
value: &str) {
self.check_and_record(Trait, self.check_and_record(Trait,
span, span,
sub_span, sub_span,
svec!(id, name, scope_id)); svec!(id, name, scope_id, value));
} }
pub fn impl_str(&mut self, pub fn impl_str(&mut self,


@ -13,22 +13,14 @@ use back::abi;
use llvm; use llvm;
use llvm::{ConstFCmp, ConstICmp, SetLinkage, PrivateLinkage, ValueRef, Bool, True, False}; use llvm::{ConstFCmp, ConstICmp, SetLinkage, PrivateLinkage, ValueRef, Bool, True, False};
use llvm::{IntEQ, IntNE, IntUGT, IntUGE, IntULT, IntULE, IntSGT, IntSGE, IntSLT, IntSLE, use llvm::{IntEQ, IntNE, IntUGT, IntUGE, IntULT, IntULE, IntSGT, IntSGE, IntSLT, IntSLE,
RealOEQ, RealOGT, RealOGE, RealOLT, RealOLE, RealONE}; RealOEQ, RealOGT, RealOGE, RealOLT, RealOLE, RealONE};
use metadata::csearch; use metadata::csearch;
use middle::const_eval; use middle::{const_eval, def};
use middle::def; use trans::{adt, closure, consts, debuginfo, expr, inline, machine};
use trans::adt; use trans::base::{mod, push_ctxt};
use trans::base;
use trans::base::push_ctxt;
use trans::closure;
use trans::common::*; use trans::common::*;
use trans::consts;
use trans::expr;
use trans::inline;
use trans::machine;
use trans::type_::Type; use trans::type_::Type;
use trans::type_of; use trans::type_of;
use trans::debuginfo;
use middle::ty::{mod, Ty}; use middle::ty::{mod, Ty};
use util::ppaux::{Repr, ty_to_string}; use util::ppaux::{Repr, ty_to_string};
@ -418,7 +410,7 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
} }
} }
} }
ast::ExprField(ref base, field, _) => { ast::ExprField(ref base, field) => {
let (bv, bt) = const_expr(cx, &**base); let (bv, bt) = const_expr(cx, &**base);
let brepr = adt::represent_type(cx, bt); let brepr = adt::represent_type(cx, bt);
expr::with_field_tys(cx.tcx(), bt, None, |discr, field_tys| { expr::with_field_tys(cx.tcx(), bt, None, |discr, field_tys| {
@ -426,7 +418,7 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
adt::const_get_field(cx, &*brepr, bv, discr, ix) adt::const_get_field(cx, &*brepr, bv, discr, ix)
}) })
} }
ast::ExprTupField(ref base, idx, _) => { ast::ExprTupField(ref base, idx) => {
let (bv, bt) = const_expr(cx, &**base); let (bv, bt) = const_expr(cx, &**base);
let brepr = adt::represent_type(cx, bt); let brepr = adt::represent_type(cx, bt);
expr::with_field_tys(cx.tcx(), bt, None, |discr, _| { expr::with_field_tys(cx.tcx(), bt, None, |discr, _| {


@ -197,13 +197,10 @@ use llvm::{ModuleRef, ContextRef, ValueRef};
use llvm::debuginfo::*; use llvm::debuginfo::*;
use metadata::csearch; use metadata::csearch;
use middle::subst::{mod, Subst, Substs}; use middle::subst::{mod, Subst, Substs};
use trans::adt; use trans::{mod, adt, machine, type_of};
use trans::common::*; use trans::common::*;
use trans::machine;
use trans::_match::{BindingInfo, TrByCopy, TrByMove, TrByRef}; use trans::_match::{BindingInfo, TrByCopy, TrByMove, TrByRef};
use trans::type_of;
use trans::type_::Type; use trans::type_::Type;
use trans;
use middle::ty::{mod, Ty}; use middle::ty::{mod, Ty};
use middle::pat_util; use middle::pat_util;
use session::config::{mod, FullDebugInfo, LimitedDebugInfo, NoDebugInfo}; use session::config::{mod, FullDebugInfo, LimitedDebugInfo, NoDebugInfo};
@ -219,8 +216,7 @@ use syntax::util::interner::Interner;
use syntax::codemap::{Span, Pos}; use syntax::codemap::{Span, Pos};
use syntax::{ast, codemap, ast_util, ast_map}; use syntax::{ast, codemap, ast_util, ast_map};
use syntax::ast_util::PostExpansionMethod; use syntax::ast_util::PostExpansionMethod;
use syntax::parse::token; use syntax::parse::token::{mod, special_idents};
use syntax::parse::token::special_idents;
static DW_LANG_RUST: c_uint = 0x9000; static DW_LANG_RUST: c_uint = 0x9000;
@ -3456,8 +3452,8 @@ fn populate_scope_map(cx: &CrateContext,
ast::ExprCast(ref sub_exp, _) | ast::ExprCast(ref sub_exp, _) |
ast::ExprAddrOf(_, ref sub_exp) | ast::ExprAddrOf(_, ref sub_exp) |
ast::ExprField(ref sub_exp, _, _) | ast::ExprField(ref sub_exp, _) |
ast::ExprTupField(ref sub_exp, _, _) | ast::ExprTupField(ref sub_exp, _) |
ast::ExprParen(ref sub_exp) => ast::ExprParen(ref sub_exp) =>
walk_expr(cx, &**sub_exp, scope_stack, scope_map), walk_expr(cx, &**sub_exp, scope_stack, scope_map),


@ -38,47 +38,26 @@ pub use self::Dest::*;
use self::lazy_binop_ty::*; use self::lazy_binop_ty::*;
use back::abi; use back::abi;
use llvm; use llvm::{mod, ValueRef};
use llvm::{ValueRef};
use middle::def; use middle::def;
use middle::mem_categorization::Typer; use middle::mem_categorization::Typer;
use middle::subst; use middle::subst::{mod, Subst};
use middle::subst::Subst; use trans::{_match, adt, asm, base, callee, closure, consts, controlflow};
use trans::_match; use trans::{debuginfo, glue, machine, meth, inline, tvec, type_of};
use trans::adt;
use trans::asm;
use trans::base::*; use trans::base::*;
use trans::base;
use trans::build::*; use trans::build::*;
use trans::callee; use trans::cleanup::{mod, CleanupMethods};
use trans::cleanup;
use trans::cleanup::CleanupMethods;
use trans::closure;
use trans::common::*; use trans::common::*;
use trans::consts;
use trans::controlflow;
use trans::datum::*; use trans::datum::*;
use trans::debuginfo; use middle::ty::{mod, struct_fields, tup_fields};
use trans::glue; use middle::ty::{AdjustDerefRef, AdjustAddEnv, AutoUnsafe, AutoPtr, Ty};
use trans::machine; use middle::typeck::{mod, MethodCall};
use trans::meth;
use trans::inline;
use trans::tvec;
use trans::type_of;
use middle::ty::{struct_fields, tup_fields};
use middle::ty::{AdjustDerefRef, AdjustAddEnv, AutoUnsafe};
use middle::ty::{AutoPtr};
use middle::ty::{mod, Ty};
use middle::typeck;
use middle::typeck::MethodCall;
use util::common::indenter; use util::common::indenter;
use util::ppaux::Repr; use util::ppaux::Repr;
use trans::machine::{llsize_of, llsize_of_alloc}; use trans::machine::{llsize_of, llsize_of_alloc};
use trans::type_::Type; use trans::type_::Type;
use syntax::ast; use syntax::{ast, ast_util, codemap};
use syntax::ast_util;
use syntax::codemap;
use syntax::print::pprust::{expr_to_string}; use syntax::print::pprust::{expr_to_string};
use syntax::ptr::P; use syntax::ptr::P;
use std::rc::Rc; use std::rc::Rc;
@ -599,10 +578,10 @@ fn trans_datum_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
ast::ExprPath(_) => { ast::ExprPath(_) => {
trans_def(bcx, expr, bcx.def(expr.id)) trans_def(bcx, expr, bcx.def(expr.id))
} }
ast::ExprField(ref base, ident, _) => { ast::ExprField(ref base, ident) => {
trans_rec_field(bcx, &**base, ident.node) trans_rec_field(bcx, &**base, ident.node)
} }
ast::ExprTupField(ref base, idx, _) => { ast::ExprTupField(ref base, idx) => {
trans_rec_tup_field(bcx, &**base, idx.node) trans_rec_tup_field(bcx, &**base, idx.node)
} }
ast::ExprIndex(ref base, ref idx) => { ast::ExprIndex(ref base, ref idx) => {


@ -1923,7 +1923,7 @@ impl Clean<ViewItemInner> for ast::ViewItem_ {
#[deriving(Clone, Encodable, Decodable)] #[deriving(Clone, Encodable, Decodable)]
pub enum ViewPath { pub enum ViewPath {
// use str = source; // use source as str;
SimpleImport(String, ImportSource), SimpleImport(String, ImportSource),
// use source::*; // use source::*;
GlobImport(ImportSource), GlobImport(ImportSource),
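The updated comment documents the current renamed-import syntax (`use source as str;` replacing the old `use str = source;`). A minimal sketch of that syntax with a hypothetical alias, in the pre-1.0 Rust of this tree:

// Renamed import: `use path as alias;`, the form the comment now refers to.
use std::vec::Vec as List;

fn main() {
    let v: List<int> = List::new();
    assert_eq!(v.len(), 0u);
}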


@ -18,7 +18,6 @@ use rustc_trans::back::link;
use syntax::{ast, ast_map, codemap, diagnostic}; use syntax::{ast, ast_map, codemap, diagnostic};
use std::cell::RefCell; use std::cell::RefCell;
use std::os;
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use arena::TypedArena; use arena::TypedArena;
@ -89,7 +88,7 @@ pub fn run_core(libs: Vec<Path>, cfgs: Vec<String>, externs: Externs,
let warning_lint = lint::builtin::WARNINGS.name_lower(); let warning_lint = lint::builtin::WARNINGS.name_lower();
let sessopts = config::Options { let sessopts = config::Options {
maybe_sysroot: Some(os::self_exe_path().unwrap().dir_path()), maybe_sysroot: None,
addl_lib_search_paths: RefCell::new(libs), addl_lib_search_paths: RefCell::new(libs),
crate_types: vec!(config::CrateTypeRlib), crate_types: vec!(config::CrateTypeRlib),
lint_opts: vec!((warning_lint, lint::Allow)), lint_opts: vec!((warning_lint, lint::Allow)),


@ -28,6 +28,7 @@
#![allow(non_camel_case_types)] #![allow(non_camel_case_types)]
use libc; use libc;
use std::ascii::AsciiExt;
use std::cell::{RefCell, Cell}; use std::cell::{RefCell, Cell};
use std::fmt; use std::fmt;
use std::slice; use std::slice;
@ -223,12 +224,8 @@ pub fn render(w: &mut fmt::Formatter, s: &str, print_toc: bool) -> fmt::Result {
}; };
// Transform the contents of the header into a hyphenated string // Transform the contents of the header into a hyphenated string
let id = s.as_slice().words().map(|s| { let id = s.as_slice().words().map(|s| s.to_ascii_lower())
match s.to_ascii_opt() { .collect::<Vec<String>>().connect("-");
Some(s) => s.to_lowercase().into_string(),
None => s.to_string()
}
}).collect::<Vec<String>>().connect("-");
// This is a terrible hack working around how hoedown gives us rendered // This is a terrible hack working around how hoedown gives us rendered
// html for text rather than the raw text. // html for text rather than the raw text.
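A small illustration of the new header-id generation above, on a hypothetical header string (pre-1.0 Rust as in this tree): each word is ASCII-lowercased and the words are joined with hyphens, with no per-character fallback any more.

use std::ascii::AsciiExt;

fn main() {
    let s = "Example Header Text";
    // Mirrors the rewritten id computation in render().
    let id = s.words()
              .map(|w| w.to_ascii_lower())
              .collect::<Vec<String>>()
              .connect("-");
    assert_eq!(id.as_slice(), "example-header-text");
}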


@ -1428,6 +1428,8 @@ impl<'a> fmt::Show for Item<'a> {
clean::TypedefItem(ref t) => item_typedef(fmt, self.item, t), clean::TypedefItem(ref t) => item_typedef(fmt, self.item, t),
clean::MacroItem(ref m) => item_macro(fmt, self.item, m), clean::MacroItem(ref m) => item_macro(fmt, self.item, m),
clean::PrimitiveItem(ref p) => item_primitive(fmt, self.item, p), clean::PrimitiveItem(ref p) => item_primitive(fmt, self.item, p),
clean::StaticItem(ref i) => item_static(fmt, self.item, i),
clean::ConstantItem(ref c) => item_constant(fmt, self.item, c),
_ => Ok(()) _ => Ok(())
} }
} }
@ -1453,13 +1455,6 @@ fn full_path(cx: &Context, item: &clean::Item) -> String {
return s return s
} }
fn blank<'a>(s: Option<&'a str>) -> &'a str {
match s {
Some(s) => s,
None => ""
}
}
fn shorter<'a>(s: Option<&'a str>) -> &'a str { fn shorter<'a>(s: Option<&'a str>) -> &'a str {
match s { match s {
Some(s) => match s.find_str("\n\n") { Some(s) => match s.find_str("\n\n") {
@ -1570,66 +1565,18 @@ fn item_module(w: &mut fmt::Formatter, cx: &Context,
id = short, name = name)); id = short, name = name));
} }
struct Initializer<'a>(&'a str, Item<'a>);
impl<'a> fmt::Show for Initializer<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let Initializer(s, item) = *self;
if s.len() == 0 { return Ok(()); }
try!(write!(f, "<code> = </code>"));
if s.contains("\n") {
match item.href() {
Some(url) => {
write!(f, "<a href='{}'>[definition]</a>",
url)
}
None => Ok(()),
}
} else {
write!(f, "<code>{}</code>", s.as_slice())
}
}
}
match myitem.inner { match myitem.inner {
clean::StaticItem(ref s) | clean::ForeignStaticItem(ref s) => {
try!(write!(w, "
<tr>
<td>{}<code>{}static {}{}: {}</code>{}</td>
<td class='docblock'>{}&nbsp;</td>
</tr>
",
ConciseStability(&myitem.stability),
VisSpace(myitem.visibility),
MutableSpace(s.mutability),
*myitem.name.as_ref().unwrap(),
s.type_,
Initializer(s.expr.as_slice(), Item { cx: cx, item: myitem }),
Markdown(blank(myitem.doc_value()))));
}
clean::ConstantItem(ref s) => {
try!(write!(w, "
<tr>
<td>{}<code>{}const {}: {}</code>{}</td>
<td class='docblock'>{}&nbsp;</td>
</tr>
",
ConciseStability(&myitem.stability),
VisSpace(myitem.visibility),
*myitem.name.as_ref().unwrap(),
s.type_,
Initializer(s.expr.as_slice(), Item { cx: cx, item: myitem }),
Markdown(blank(myitem.doc_value()))));
}
clean::ViewItemItem(ref item) => { clean::ViewItemItem(ref item) => {
match item.inner { match item.inner {
clean::ExternCrate(ref name, ref src, _) => { clean::ExternCrate(ref name, ref src, _) => {
try!(write!(w, "<tr><td><code>extern crate {}",
name.as_slice()));
match *src { match *src {
Some(ref src) => try!(write!(w, " = \"{}\"", Some(ref src) =>
src.as_slice())), try!(write!(w, "<tr><td><code>extern crate \"{}\" as {}",
None => {} src.as_slice(),
name.as_slice())),
None =>
try!(write!(w, "<tr><td><code>extern crate {}",
name.as_slice())),
} }
try!(write!(w, ";</code></td></tr>")); try!(write!(w, ";</code></td></tr>"));
} }
@ -1665,6 +1612,39 @@ fn item_module(w: &mut fmt::Formatter, cx: &Context,
write!(w, "</table>") write!(w, "</table>")
} }
struct Initializer<'a>(&'a str);
impl<'a> fmt::Show for Initializer<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let Initializer(s) = *self;
if s.len() == 0 { return Ok(()); }
try!(write!(f, "<code> = </code>"));
write!(f, "<code>{}</code>", s.as_slice())
}
}
fn item_constant(w: &mut fmt::Formatter, it: &clean::Item,
c: &clean::Constant) -> fmt::Result {
try!(write!(w, "<pre class='rust const'>{vis}const \
{name}: {typ}{init}</pre>",
vis = VisSpace(it.visibility),
name = it.name.as_ref().unwrap().as_slice(),
typ = c.type_,
init = Initializer(c.expr.as_slice())));
document(w, it)
}
fn item_static(w: &mut fmt::Formatter, it: &clean::Item,
s: &clean::Static) -> fmt::Result {
try!(write!(w, "<pre class='rust static'>{vis}static {mutability}\
{name}: {typ}{init}</pre>",
vis = VisSpace(it.visibility),
mutability = MutableSpace(s.mutability),
name = it.name.as_ref().unwrap().as_slice(),
typ = s.type_,
init = Initializer(s.expr.as_slice())));
document(w, it)
}
fn item_function(w: &mut fmt::Formatter, it: &clean::Item, fn item_function(w: &mut fmt::Formatter, it: &clean::Item,
f: &clean::Function) -> fmt::Result { f: &clean::Function) -> fmt::Result {
try!(write!(w, "<pre class='rust fn'>{vis}{fn_style}fn \ try!(write!(w, "<pre class='rust fn'>{vis}{fn_style}fn \


@ -234,6 +234,7 @@ nav.sub {
.content .highlighted.struct { background-color: #e7b1a0; } .content .highlighted.struct { background-color: #e7b1a0; }
.content .highlighted.fn { background-color: #c6afb3; } .content .highlighted.fn { background-color: #c6afb3; }
.content .highlighted.method { background-color: #c6afb3; } .content .highlighted.method { background-color: #c6afb3; }
.content .highlighted.tymethod { background-color: #c6afb3; }
.content .highlighted.ffi { background-color: #c6afb3; } .content .highlighted.ffi { background-color: #c6afb3; }
.docblock.short.nowrap { .docblock.short.nowrap {
@ -348,6 +349,7 @@ p a:hover { text-decoration: underline; }
.content span.struct, .content a.struct, .block a.current.struct { color: #e53700; } .content span.struct, .content a.struct, .block a.current.struct { color: #e53700; }
.content span.fn, .content a.fn, .block a.current.fn { color: #8c6067; } .content span.fn, .content a.fn, .block a.current.fn { color: #8c6067; }
.content span.method, .content a.method, .block a.current.method { color: #8c6067; } .content span.method, .content a.method, .block a.current.method { color: #8c6067; }
.content span.tymethod, .content a.tymethod, .block a.current.tymethod { color: #8c6067; }
.content span.ffi, .content a.ffi, .block a.current.ffi { color: #8c6067; } .content span.ffi, .content a.ffi, .block a.current.ffi { color: #8c6067; }
.content .fnname { color: #8c6067; } .content .fnname { color: #8c6067; }


@ -12,7 +12,8 @@
//! Operations on ASCII strings and characters //! Operations on ASCII strings and characters
#![experimental] #![unstable = "unsure about placement and naming"]
#![allow(deprecated)]
use core::kinds::Sized; use core::kinds::Sized;
use fmt; use fmt;
@ -31,30 +32,47 @@ pub struct Ascii { chr: u8 }
impl Ascii { impl Ascii {
/// Converts an ascii character into a `u8`. /// Converts an ascii character into a `u8`.
#[inline] #[inline]
pub fn to_byte(self) -> u8 { #[unstable = "recently renamed"]
pub fn as_byte(&self) -> u8 {
self.chr self.chr
} }
/// Deprecated: use `as_byte` instead.
#[deprecated = "use as_byte"]
pub fn to_byte(self) -> u8 {
self.as_byte()
}
/// Converts an ascii character into a `char`. /// Converts an ascii character into a `char`.
#[inline] #[inline]
pub fn to_char(self) -> char { #[unstable = "recently renamed"]
pub fn as_char(&self) -> char {
self.chr as char self.chr as char
} }
/// Deprecated: use `as_char` instead.
#[deprecated = "use as_char"]
pub fn to_char(self) -> char {
self.as_char()
}
/// Convert to lowercase. /// Convert to lowercase.
#[inline] #[inline]
pub fn to_lowercase(self) -> Ascii { #[stable]
pub fn to_lowercase(&self) -> Ascii {
Ascii{chr: ASCII_LOWER_MAP[self.chr as uint]} Ascii{chr: ASCII_LOWER_MAP[self.chr as uint]}
} }
/// Convert to uppercase. /// Convert to uppercase.
#[inline] #[inline]
pub fn to_uppercase(self) -> Ascii { #[stable]
pub fn to_uppercase(&self) -> Ascii {
Ascii{chr: ASCII_UPPER_MAP[self.chr as uint]} Ascii{chr: ASCII_UPPER_MAP[self.chr as uint]}
} }
/// Compares two ascii characters of equality, ignoring case. /// Compares two ascii characters of equality, ignoring case.
#[inline] #[inline]
#[deprecated = "normalize with to_lowercase"]
pub fn eq_ignore_case(self, other: Ascii) -> bool { pub fn eq_ignore_case(self, other: Ascii) -> bool {
ASCII_LOWER_MAP[self.chr as uint] == ASCII_LOWER_MAP[other.chr as uint] ASCII_LOWER_MAP[self.chr as uint] == ASCII_LOWER_MAP[other.chr as uint]
} }
@ -63,66 +81,77 @@ impl Ascii {
/// Check if the character is a letter (a-z, A-Z) /// Check if the character is a letter (a-z, A-Z)
#[inline] #[inline]
#[stable]
pub fn is_alphabetic(&self) -> bool { pub fn is_alphabetic(&self) -> bool {
(self.chr >= 0x41 && self.chr <= 0x5A) || (self.chr >= 0x61 && self.chr <= 0x7A) (self.chr >= 0x41 && self.chr <= 0x5A) || (self.chr >= 0x61 && self.chr <= 0x7A)
} }
/// Check if the character is a number (0-9) /// Check if the character is a number (0-9)
#[inline] #[inline]
#[unstable = "may be renamed"]
pub fn is_digit(&self) -> bool { pub fn is_digit(&self) -> bool {
self.chr >= 0x30 && self.chr <= 0x39 self.chr >= 0x30 && self.chr <= 0x39
} }
/// Check if the character is a letter or number /// Check if the character is a letter or number
#[inline] #[inline]
#[stable]
pub fn is_alphanumeric(&self) -> bool { pub fn is_alphanumeric(&self) -> bool {
self.is_alphabetic() || self.is_digit() self.is_alphabetic() || self.is_digit()
} }
/// Check if the character is a space or horizontal tab /// Check if the character is a space or horizontal tab
#[inline] #[inline]
#[experimental = "likely to be removed"]
pub fn is_blank(&self) -> bool { pub fn is_blank(&self) -> bool {
self.chr == b' ' || self.chr == b'\t' self.chr == b' ' || self.chr == b'\t'
} }
/// Check if the character is a control character /// Check if the character is a control character
#[inline] #[inline]
#[stable]
pub fn is_control(&self) -> bool { pub fn is_control(&self) -> bool {
self.chr < 0x20 || self.chr == 0x7F self.chr < 0x20 || self.chr == 0x7F
} }
/// Checks if the character is printable (except space) /// Checks if the character is printable (except space)
#[inline] #[inline]
#[experimental = "unsure about naming, or whether this is needed"]
pub fn is_graph(&self) -> bool { pub fn is_graph(&self) -> bool {
(self.chr - 0x21) < 0x5E (self.chr - 0x21) < 0x5E
} }
/// Checks if the character is printable (including space) /// Checks if the character is printable (including space)
#[inline] #[inline]
#[unstable = "unsure about naming"]
pub fn is_print(&self) -> bool { pub fn is_print(&self) -> bool {
(self.chr - 0x20) < 0x5F (self.chr - 0x20) < 0x5F
} }
/// Checks if the character is lowercase /// Checks if the character is alphabetic and lowercase
#[inline] #[inline]
#[stable]
pub fn is_lowercase(&self) -> bool { pub fn is_lowercase(&self) -> bool {
(self.chr - b'a') < 26 (self.chr - b'a') < 26
} }
/// Checks if the character is uppercase /// Checks if the character is alphabetic and uppercase
#[inline] #[inline]
#[stable]
pub fn is_uppercase(&self) -> bool { pub fn is_uppercase(&self) -> bool {
(self.chr - b'A') < 26 (self.chr - b'A') < 26
} }
/// Checks if the character is punctuation /// Checks if the character is punctuation
#[inline] #[inline]
#[stable]
pub fn is_punctuation(&self) -> bool { pub fn is_punctuation(&self) -> bool {
self.is_graph() && !self.is_alphanumeric() self.is_graph() && !self.is_alphanumeric()
} }
/// Checks if the character is a valid hex digit /// Checks if the character is a valid hex digit
#[inline] #[inline]
#[stable]
pub fn is_hex(&self) -> bool { pub fn is_hex(&self) -> bool {
self.is_digit() || ((self.chr | 32u8) - b'a') < 6 self.is_digit() || ((self.chr | 32u8) - b'a') < 6
} }
@ -135,6 +164,7 @@ impl<'a> fmt::Show for Ascii {
} }
/// Trait for converting into an ascii type. /// Trait for converting into an ascii type.
#[experimental = "may be replaced by generic conversion traits"]
pub trait AsciiCast<T> { pub trait AsciiCast<T> {
/// Convert to an ascii type, panic on non-ASCII input. /// Convert to an ascii type, panic on non-ASCII input.
#[inline] #[inline]
@ -160,6 +190,7 @@ pub trait AsciiCast<T> {
fn is_ascii(&self) -> bool; fn is_ascii(&self) -> bool;
} }
#[experimental = "may be replaced by generic conversion traits"]
impl<'a> AsciiCast<&'a[Ascii]> for &'a [u8] { impl<'a> AsciiCast<&'a[Ascii]> for &'a [u8] {
#[inline] #[inline]
unsafe fn to_ascii_nocheck(&self) -> &'a[Ascii] { unsafe fn to_ascii_nocheck(&self) -> &'a[Ascii] {
@ -175,6 +206,7 @@ impl<'a> AsciiCast<&'a[Ascii]> for &'a [u8] {
} }
} }
#[experimental = "may be replaced by generic conversion traits"]
impl<'a> AsciiCast<&'a [Ascii]> for &'a str { impl<'a> AsciiCast<&'a [Ascii]> for &'a str {
#[inline] #[inline]
unsafe fn to_ascii_nocheck(&self) -> &'a [Ascii] { unsafe fn to_ascii_nocheck(&self) -> &'a [Ascii] {
@ -187,6 +219,7 @@ impl<'a> AsciiCast<&'a [Ascii]> for &'a str {
} }
} }
#[experimental = "may be replaced by generic conversion traits"]
impl AsciiCast<Ascii> for u8 { impl AsciiCast<Ascii> for u8 {
#[inline] #[inline]
unsafe fn to_ascii_nocheck(&self) -> Ascii { unsafe fn to_ascii_nocheck(&self) -> Ascii {
@ -199,6 +232,7 @@ impl AsciiCast<Ascii> for u8 {
} }
} }
#[experimental = "may be replaced by generic conversion traits"]
impl AsciiCast<Ascii> for char { impl AsciiCast<Ascii> for char {
#[inline] #[inline]
unsafe fn to_ascii_nocheck(&self) -> Ascii { unsafe fn to_ascii_nocheck(&self) -> Ascii {
@ -212,6 +246,7 @@ impl AsciiCast<Ascii> for char {
} }
/// Trait for copyless casting to an ascii vector. /// Trait for copyless casting to an ascii vector.
#[experimental = "may be replaced by generic conversion traits"]
pub trait OwnedAsciiCast { pub trait OwnedAsciiCast {
/// Check if convertible to ascii /// Check if convertible to ascii
fn is_ascii(&self) -> bool; fn is_ascii(&self) -> bool;
@ -241,6 +276,7 @@ pub trait OwnedAsciiCast {
unsafe fn into_ascii_nocheck(self) -> Vec<Ascii>; unsafe fn into_ascii_nocheck(self) -> Vec<Ascii>;
} }
#[experimental = "may be replaced by generic conversion traits"]
impl OwnedAsciiCast for String { impl OwnedAsciiCast for String {
#[inline] #[inline]
fn is_ascii(&self) -> bool { fn is_ascii(&self) -> bool {
@ -253,6 +289,7 @@ impl OwnedAsciiCast for String {
} }
} }
#[experimental = "may be replaced by generic conversion traits"]
impl OwnedAsciiCast for Vec<u8> { impl OwnedAsciiCast for Vec<u8> {
#[inline] #[inline]
fn is_ascii(&self) -> bool { fn is_ascii(&self) -> bool {
@ -274,6 +311,7 @@ impl OwnedAsciiCast for Vec<u8> {
/// Trait for converting an ascii type to a string. Needed to convert /// Trait for converting an ascii type to a string. Needed to convert
/// `&[Ascii]` to `&str`. /// `&[Ascii]` to `&str`.
#[experimental = "may be replaced by generic conversion traits"]
pub trait AsciiStr for Sized? { pub trait AsciiStr for Sized? {
/// Convert to a string. /// Convert to a string.
fn as_str_ascii<'a>(&'a self) -> &'a str; fn as_str_ascii<'a>(&'a self) -> &'a str;
@ -283,6 +321,7 @@ pub trait AsciiStr for Sized? {
fn to_lower(&self) -> Vec<Ascii>; fn to_lower(&self) -> Vec<Ascii>;
/// Convert to vector representing a lower cased ascii string. /// Convert to vector representing a lower cased ascii string.
#[deprecated = "use iterators instead"]
fn to_lowercase(&self) -> Vec<Ascii>; fn to_lowercase(&self) -> Vec<Ascii>;
/// Deprecated: use `to_uppercase` /// Deprecated: use `to_uppercase`
@ -290,12 +329,15 @@ pub trait AsciiStr for Sized? {
fn to_upper(&self) -> Vec<Ascii>; fn to_upper(&self) -> Vec<Ascii>;
/// Convert to vector representing a upper cased ascii string. /// Convert to vector representing a upper cased ascii string.
#[deprecated = "use iterators instead"]
fn to_uppercase(&self) -> Vec<Ascii>; fn to_uppercase(&self) -> Vec<Ascii>;
/// Compares two Ascii strings ignoring case. /// Compares two Ascii strings ignoring case.
#[deprecated = "use iterators instead"]
fn eq_ignore_case(&self, other: &[Ascii]) -> bool; fn eq_ignore_case(&self, other: &[Ascii]) -> bool;
} }
#[experimental = "may be replaced by generic conversion traits"]
impl AsciiStr for [Ascii] { impl AsciiStr for [Ascii] {
#[inline] #[inline]
fn as_str_ascii<'a>(&'a self) -> &'a str { fn as_str_ascii<'a>(&'a self) -> &'a str {
@ -336,11 +378,13 @@ impl IntoString for Vec<Ascii> {
} }
/// Trait to convert to an owned byte vector by consuming self /// Trait to convert to an owned byte vector by consuming self
#[experimental = "may be replaced by generic conversion traits"]
pub trait IntoBytes { pub trait IntoBytes {
/// Converts to an owned byte vector by consuming self /// Converts to an owned byte vector by consuming self
fn into_bytes(self) -> Vec<u8>; fn into_bytes(self) -> Vec<u8>;
} }
#[experimental = "may be replaced by generic conversion traits"]
impl IntoBytes for Vec<Ascii> { impl IntoBytes for Vec<Ascii> {
fn into_bytes(self) -> Vec<u8> { fn into_bytes(self) -> Vec<u8> {
unsafe { unsafe {
@ -358,6 +402,7 @@ impl IntoBytes for Vec<Ascii> {
/// Extension methods for ASCII-subset only operations on owned strings /// Extension methods for ASCII-subset only operations on owned strings
#[experimental = "would prefer to do this in a more general way"]
pub trait OwnedAsciiExt { pub trait OwnedAsciiExt {
/// Convert the string to ASCII upper case: /// Convert the string to ASCII upper case:
/// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z', /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
@ -371,6 +416,7 @@ pub trait OwnedAsciiExt {
} }
/// Extension methods for ASCII-subset only operations on string slices /// Extension methods for ASCII-subset only operations on string slices
#[experimental = "would prefer to do this in a more general way"]
pub trait AsciiExt<T> for Sized? { pub trait AsciiExt<T> for Sized? {
/// Makes a copy of the string in ASCII upper case: /// Makes a copy of the string in ASCII upper case:
/// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z', /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
@ -388,6 +434,7 @@ pub trait AsciiExt<T> for Sized? {
fn eq_ignore_ascii_case(&self, other: &Self) -> bool; fn eq_ignore_ascii_case(&self, other: &Self) -> bool;
} }
#[experimental = "would prefer to do this in a more general way"]
impl AsciiExt<String> for str { impl AsciiExt<String> for str {
#[inline] #[inline]
fn to_ascii_upper(&self) -> String { fn to_ascii_upper(&self) -> String {
@ -407,6 +454,7 @@ impl AsciiExt<String> for str {
} }
} }
#[experimental = "would prefer to do this in a more general way"]
impl OwnedAsciiExt for String { impl OwnedAsciiExt for String {
#[inline] #[inline]
fn into_ascii_upper(self) -> String { fn into_ascii_upper(self) -> String {
@ -421,6 +469,7 @@ impl OwnedAsciiExt for String {
} }
} }
#[experimental = "would prefer to do this in a more general way"]
impl AsciiExt<Vec<u8>> for [u8] { impl AsciiExt<Vec<u8>> for [u8] {
#[inline] #[inline]
fn to_ascii_upper(&self) -> Vec<u8> { fn to_ascii_upper(&self) -> Vec<u8> {
@ -443,6 +492,7 @@ impl AsciiExt<Vec<u8>> for [u8] {
} }
} }
#[experimental = "would prefer to do this in a more general way"]
impl OwnedAsciiExt for Vec<u8> { impl OwnedAsciiExt for Vec<u8> {
#[inline] #[inline]
fn into_ascii_upper(mut self) -> Vec<u8> { fn into_ascii_upper(mut self) -> Vec<u8> {
@ -472,6 +522,7 @@ impl OwnedAsciiExt for Vec<u8> {
/// - Any other chars in the range [0x20,0x7e] are not escaped. /// - Any other chars in the range [0x20,0x7e] are not escaped.
/// - Any other chars are given hex escapes. /// - Any other chars are given hex escapes.
/// - Unicode escapes are never generated by this function. /// - Unicode escapes are never generated by this function.
#[unstable = "needs to be updated to use an iterator"]
pub fn escape_default(c: u8, f: |u8|) { pub fn escape_default(c: u8, f: |u8|) {
match c { match c {
b'\t' => { f(b'\\'); f(b't'); } b'\t' => { f(b'\\'); f(b't'); }
@ -494,7 +545,7 @@ pub fn escape_default(c: u8, f: |u8|) {
} }
} }
pub static ASCII_LOWER_MAP: [u8, ..256] = [ static ASCII_LOWER_MAP: [u8, ..256] = [
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
@ -533,7 +584,7 @@ pub static ASCII_LOWER_MAP: [u8, ..256] = [
0xf8, 0xf9, 0xfa, 0xfb, 0xfc, 0xfd, 0xfe, 0xff, 0xf8, 0xf9, 0xfa, 0xfb, 0xfc, 0xfd, 0xfe, 0xff,
]; ];
pub static ASCII_UPPER_MAP: [u8, ..256] = [ static ASCII_UPPER_MAP: [u8, ..256] = [
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
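A minimal usage sketch of the renamed accessors introduced above (pre-1.0 Rust as in this tree); `to_byte`/`to_char` remain only as deprecated wrappers:

use std::ascii::AsciiCast;

fn main() {
    let c = b'a'.to_ascii();
    // `as_byte`/`as_char` are the new names for the raw accessors.
    assert_eq!(c.as_byte(), b'a');
    assert_eq!(c.as_char(), 'a');
    assert_eq!(c.to_uppercase().as_char(), 'A');
}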


@ -1110,7 +1110,7 @@ extern "system" {
/// ///
/// The first element is traditionally the path to the executable, but it can be /// The first element is traditionally the path to the executable, but it can be
/// set to arbitrary text, and it may not even exist, so this property should not /// set to arbitrary text, and it may not even exist, so this property should not
// be relied upon for security purposes. /// be relied upon for security purposes.
/// ///
/// The arguments are interpreted as utf-8, with invalid bytes replaced with \uFFFD. /// The arguments are interpreted as utf-8, with invalid bytes replaced with \uFFFD.
/// See `String::from_utf8_lossy` for details. /// See `String::from_utf8_lossy` for details.
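This doc-comment fix (a stray `//` turned into `///`) is assumed to sit on the argument-vector accessor in std::os; a hedged usage sketch:

use std::os;

fn main() {
    // The first element is usually the executable path, but per the doc
    // above it must not be relied upon for security purposes.
    for arg in os::args().iter() {
        println!("{}", arg);
    }
}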


@ -235,10 +235,10 @@ impl GenericPathUnsafe for Path {
let repr = me.repr.as_slice(); let repr = me.repr.as_slice();
match me.prefix { match me.prefix {
Some(DiskPrefix) => { Some(DiskPrefix) => {
repr.as_bytes()[0] == path.as_bytes()[0].to_ascii().to_uppercase().to_byte() repr.as_bytes()[0] == path.as_bytes()[0].to_ascii().to_uppercase().as_byte()
} }
Some(VerbatimDiskPrefix) => { Some(VerbatimDiskPrefix) => {
repr.as_bytes()[4] == path.as_bytes()[0].to_ascii().to_uppercase().to_byte() repr.as_bytes()[4] == path.as_bytes()[0].to_ascii().to_uppercase().as_byte()
} }
_ => false _ => false
} }
@ -673,14 +673,17 @@ impl Path {
match (self.prefix, other.prefix) { match (self.prefix, other.prefix) {
(Some(DiskPrefix), Some(VerbatimDiskPrefix)) => { (Some(DiskPrefix), Some(VerbatimDiskPrefix)) => {
self.is_absolute() && self.is_absolute() &&
s_repr.as_bytes()[0].to_ascii().eq_ignore_case(o_repr.as_bytes()[4].to_ascii()) s_repr.as_bytes()[0].to_ascii().to_lowercase() ==
o_repr.as_bytes()[4].to_ascii().to_lowercase()
} }
(Some(VerbatimDiskPrefix), Some(DiskPrefix)) => { (Some(VerbatimDiskPrefix), Some(DiskPrefix)) => {
other.is_absolute() && other.is_absolute() &&
s_repr.as_bytes()[4].to_ascii().eq_ignore_case(o_repr.as_bytes()[0].to_ascii()) s_repr.as_bytes()[4].to_ascii().to_lowercase() ==
o_repr.as_bytes()[0].to_ascii().to_lowercase()
} }
(Some(VerbatimDiskPrefix), Some(VerbatimDiskPrefix)) => { (Some(VerbatimDiskPrefix), Some(VerbatimDiskPrefix)) => {
s_repr.as_bytes()[4].to_ascii().eq_ignore_case(o_repr.as_bytes()[4].to_ascii()) s_repr.as_bytes()[4].to_ascii().to_lowercase() ==
o_repr.as_bytes()[4].to_ascii().to_lowercase()
} }
(Some(UNCPrefix(_,_)), Some(VerbatimUNCPrefix(_,_))) => { (Some(UNCPrefix(_,_)), Some(VerbatimUNCPrefix(_,_))) => {
s_repr.slice(2, self.prefix_len()) == o_repr.slice(8, other.prefix_len()) s_repr.slice(2, self.prefix_len()) == o_repr.slice(8, other.prefix_len())
@ -747,10 +750,7 @@ impl Path {
let mut s = String::from_str(s.slice_to(len)); let mut s = String::from_str(s.slice_to(len));
unsafe { unsafe {
let v = s.as_mut_vec(); let v = s.as_mut_vec();
v[0] = (*v)[0] v[0] = (*v)[0].to_ascii().to_uppercase().as_byte();
.to_ascii()
.to_uppercase()
.to_byte();
} }
if is_abs { if is_abs {
// normalize C:/ to C:\ // normalize C:/ to C:\
@ -765,7 +765,7 @@ impl Path {
let mut s = String::from_str(s.slice_to(len)); let mut s = String::from_str(s.slice_to(len));
unsafe { unsafe {
let v = s.as_mut_vec(); let v = s.as_mut_vec();
v[4] = (*v)[4].to_ascii().to_uppercase().to_byte(); v[4] = (*v)[4].to_ascii().to_uppercase().as_byte();
} }
Some(s) Some(s)
} }
@ -787,13 +787,13 @@ impl Path {
match prefix { match prefix {
Some(DiskPrefix) => { Some(DiskPrefix) => {
s.push(prefix_.as_bytes()[0].to_ascii() s.push(prefix_.as_bytes()[0].to_ascii()
.to_uppercase().to_char()); .to_uppercase().as_char());
s.push(':'); s.push(':');
} }
Some(VerbatimDiskPrefix) => { Some(VerbatimDiskPrefix) => {
s.push_str(prefix_.slice_to(4)); s.push_str(prefix_.slice_to(4));
s.push(prefix_.as_bytes()[4].to_ascii() s.push(prefix_.as_bytes()[4].to_ascii()
.to_uppercase().to_char()); .to_uppercase().as_char());
s.push_str(prefix_.slice_from(5)); s.push_str(prefix_.slice_from(5));
} }
Some(UNCPrefix(a,b)) => { Some(UNCPrefix(a,b)) => {
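The prefix comparisons above now lowercase both sides instead of calling the deprecated `eq_ignore_case`; a minimal sketch of that pattern on two drive-letter bytes:

use std::ascii::AsciiCast;

fn main() {
    let a = b'C'.to_ascii();
    let b = b'c'.to_ascii();
    // Case-insensitive equality by normalizing both sides to lowercase.
    assert!(a.to_lowercase() == b.to_lowercase());
}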


@ -673,8 +673,8 @@ pub enum Expr_ {
ExprAssign(P<Expr>, P<Expr>), ExprAssign(P<Expr>, P<Expr>),
ExprAssignOp(BinOp, P<Expr>, P<Expr>), ExprAssignOp(BinOp, P<Expr>, P<Expr>),
ExprField(P<Expr>, SpannedIdent, Vec<P<Ty>>), ExprField(P<Expr>, SpannedIdent),
ExprTupField(P<Expr>, Spanned<uint>, Vec<P<Ty>>), ExprTupField(P<Expr>, Spanned<uint>),
ExprIndex(P<Expr>, P<Expr>), ExprIndex(P<Expr>, P<Expr>),
ExprSlice(P<Expr>, Option<P<Expr>>, Option<P<Expr>>, Mutability), ExprSlice(P<Expr>, Option<P<Expr>>, Option<P<Expr>>, Mutability),
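With `ExprField`/`ExprTupField` losing their type-parameter list, field and tuple-field access can no longer be written with `::<...>` (see the compile-fail test added later in this commit). A sketch of the surviving form, under the `tuple_indexing` gate that test uses:

#![feature(tuple_indexing)]

fn main() {
    let t = (1i, 2i);
    let first = t.0; // plain field/tuple-field access still parses
    // `t.0::<int>` is rejected by the parser after this change
    assert_eq!(first, 1i);
}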


@ -577,7 +577,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
}; };
let id = Spanned { node: ident, span: field_span }; let id = Spanned { node: ident, span: field_span };
self.expr(sp, ast::ExprField(expr, id, Vec::new())) self.expr(sp, ast::ExprField(expr, id))
} }
fn expr_tup_field_access(&self, sp: Span, expr: P<ast::Expr>, idx: uint) -> P<ast::Expr> { fn expr_tup_field_access(&self, sp: Span, expr: P<ast::Expr>, idx: uint) -> P<ast::Expr> {
let field_span = Span { let field_span = Span {
@ -587,7 +587,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
}; };
let id = Spanned { node: idx, span: field_span }; let id = Spanned { node: idx, span: field_span };
self.expr(sp, ast::ExprTupField(expr, id, Vec::new())) self.expr(sp, ast::ExprTupField(expr, id))
} }
fn expr_addr_of(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr> { fn expr_addr_of(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr> {
self.expr(sp, ast::ExprAddrOf(ast::MutImmutable, e)) self.expr(sp, ast::ExprAddrOf(ast::MutImmutable, e))


@ -1345,15 +1345,13 @@ pub fn noop_fold_expr<T: Folder>(Expr {id, node, span}: Expr, folder: &mut T) ->
folder.fold_expr(el), folder.fold_expr(el),
folder.fold_expr(er)) folder.fold_expr(er))
} }
ExprField(el, ident, tys) => { ExprField(el, ident) => {
ExprField(folder.fold_expr(el), ExprField(folder.fold_expr(el),
respan(ident.span, folder.fold_ident(ident.node)), respan(ident.span, folder.fold_ident(ident.node)))
tys.move_map(|x| folder.fold_ty(x)))
} }
ExprTupField(el, ident, tys) => { ExprTupField(el, ident) => {
ExprTupField(folder.fold_expr(el), ExprTupField(folder.fold_expr(el),
respan(ident.span, folder.fold_uint(ident.node)), respan(ident.span, folder.fold_uint(ident.node)))
tys.move_map(|x| folder.fold_ty(x)))
} }
ExprIndex(el, er) => { ExprIndex(el, er) => {
ExprIndex(folder.fold_expr(el), folder.fold_expr(er)) ExprIndex(folder.fold_expr(el), folder.fold_expr(er))


@ -49,8 +49,7 @@ use ast::{PolyTraitRef};
use ast::{QPath, RequiredMethod}; use ast::{QPath, RequiredMethod};
use ast::{Return, BiShl, BiShr, Stmt, StmtDecl}; use ast::{Return, BiShl, BiShr, Stmt, StmtDecl};
use ast::{StmtExpr, StmtSemi, StmtMac, StructDef, StructField}; use ast::{StmtExpr, StmtSemi, StmtMac, StructDef, StructField};
use ast::{StructVariantKind, BiSub}; use ast::{StructVariantKind, BiSub, StrStyle};
use ast::StrStyle;
use ast::{SelfExplicit, SelfRegion, SelfStatic, SelfValue}; use ast::{SelfExplicit, SelfRegion, SelfStatic, SelfValue};
use ast::{Delimited, SequenceRepetition, TokenTree, TraitItem, TraitRef}; use ast::{Delimited, SequenceRepetition, TokenTree, TraitItem, TraitRef};
use ast::{TtDelimited, TtSequence, TtToken}; use ast::{TtDelimited, TtSequence, TtToken};
@ -65,23 +64,18 @@ use ast::{UnsafeFn, ViewItem, ViewItem_, ViewItemExternCrate, ViewItemUse};
use ast::{ViewPath, ViewPathGlob, ViewPathList, ViewPathSimple}; use ast::{ViewPath, ViewPathGlob, ViewPathList, ViewPathSimple};
use ast::{Visibility, WhereClause, WherePredicate}; use ast::{Visibility, WhereClause, WherePredicate};
use ast; use ast;
use ast_util::{as_prec, ident_to_path, operator_prec}; use ast_util::{mod, as_prec, ident_to_path, operator_prec};
use ast_util; use codemap::{mod, Span, BytePos, Spanned, spanned, mk_sp};
use codemap::{Span, BytePos, Spanned, spanned, mk_sp};
use codemap;
use diagnostic; use diagnostic;
use ext::tt::macro_parser; use ext::tt::macro_parser;
use parse; use parse;
use parse::attr::ParserAttr; use parse::attr::ParserAttr;
use parse::classify; use parse::classify;
use parse::common::{SeqSep, seq_sep_none}; use parse::common::{SeqSep, seq_sep_none, seq_sep_trailing_allowed};
use parse::common::{seq_sep_trailing_allowed}; use parse::lexer::{Reader, TokenAndSpan};
use parse::lexer::Reader;
use parse::lexer::TokenAndSpan;
use parse::obsolete::*; use parse::obsolete::*;
use parse::token::{MatchNt, SubstNt, InternedString}; use parse::token::{mod, MatchNt, SubstNt, InternedString};
use parse::token::{keywords, special_idents}; use parse::token::{keywords, special_idents};
use parse::token;
use parse::{new_sub_parser_from_file, ParseSess}; use parse::{new_sub_parser_from_file, ParseSess};
use print::pprust; use print::pprust;
use ptr::P; use ptr::P;
@ -89,7 +83,6 @@ use owned_slice::OwnedSlice;
use std::collections::HashSet; use std::collections::HashSet;
use std::io::fs::PathExtensions; use std::io::fs::PathExtensions;
use std::mem::replace;
use std::mem; use std::mem;
use std::num::Float; use std::num::Float;
use std::rc::Rc; use std::rc::Rc;
@ -915,7 +908,7 @@ impl<'a> Parser<'a> {
tok: token::Underscore, tok: token::Underscore,
sp: self.span, sp: self.span,
}; };
replace(&mut self.buffer[buffer_start], placeholder) mem::replace(&mut self.buffer[buffer_start], placeholder)
}; };
self.span = next.sp; self.span = next.sp;
self.token = next.tok; self.token = next.tok;
@ -924,7 +917,7 @@ impl<'a> Parser<'a> {
/// Advance the parser by one token and return the bumped token. /// Advance the parser by one token and return the bumped token.
pub fn bump_and_get(&mut self) -> token::Token { pub fn bump_and_get(&mut self) -> token::Token {
let old_token = replace(&mut self.token, token::Underscore); let old_token = mem::replace(&mut self.token, token::Underscore);
self.bump(); self.bump();
old_token old_token
} }
@ -2103,14 +2096,12 @@ impl<'a> Parser<'a> {
ExprSlice(expr, start, end, mutbl) ExprSlice(expr, start, end, mutbl)
} }
pub fn mk_field(&mut self, expr: P<Expr>, ident: ast::SpannedIdent, pub fn mk_field(&mut self, expr: P<Expr>, ident: ast::SpannedIdent) -> ast::Expr_ {
tys: Vec<P<Ty>>) -> ast::Expr_ { ExprField(expr, ident)
ExprField(expr, ident, tys)
} }
pub fn mk_tup_field(&mut self, expr: P<Expr>, idx: codemap::Spanned<uint>, pub fn mk_tup_field(&mut self, expr: P<Expr>, idx: codemap::Spanned<uint>) -> ast::Expr_ {
tys: Vec<P<Ty>>) -> ast::Expr_ { ExprTupField(expr, idx)
ExprTupField(expr, idx, tys)
} }
pub fn mk_assign_op(&mut self, binop: ast::BinOp, pub fn mk_assign_op(&mut self, binop: ast::BinOp,
@ -2465,31 +2456,26 @@ impl<'a> Parser<'a> {
} }
let id = spanned(dot, hi, i); let id = spanned(dot, hi, i);
let field = self.mk_field(e, id, tys); let field = self.mk_field(e, id);
e = self.mk_expr(lo, hi, field); e = self.mk_expr(lo, hi, field);
} }
} }
} }
token::Literal(token::Integer(n), suf) => { token::Literal(token::Integer(n), suf) => {
let sp = self.span; let sp = self.span;
// A tuple index may not have a suffix
self.expect_no_suffix(sp, "tuple index", suf); self.expect_no_suffix(sp, "tuple index", suf);
let index = n.as_str();
let dot = self.last_span.hi; let dot = self.last_span.hi;
hi = self.span.hi; hi = self.span.hi;
self.bump(); self.bump();
let (_, tys) = if self.eat(&token::ModSep) {
self.expect_lt();
self.parse_generic_values_after_lt()
} else {
(Vec::new(), Vec::new())
};
let num = from_str::<uint>(index); let index = from_str::<uint>(n.as_str());
match num { match index {
Some(n) => { Some(n) => {
let id = spanned(dot, hi, n); let id = spanned(dot, hi, n);
let field = self.mk_tup_field(e, id, tys); let field = self.mk_tup_field(e, id);
e = self.mk_expr(lo, hi, field); e = self.mk_expr(lo, hi, field);
} }
None => { None => {


@ -11,31 +11,25 @@
pub use self::AnnNode::*; pub use self::AnnNode::*;
use abi; use abi;
use ast::{FnUnboxedClosureKind, FnMutUnboxedClosureKind}; use ast::{mod, FnUnboxedClosureKind, FnMutUnboxedClosureKind};
use ast::{FnOnceUnboxedClosureKind}; use ast::{FnOnceUnboxedClosureKind};
use ast::{MethodImplItem, RegionTyParamBound, TraitTyParamBound}; use ast::{MethodImplItem, RegionTyParamBound, TraitTyParamBound};
use ast::{RequiredMethod, ProvidedMethod, TypeImplItem, TypeTraitItem}; use ast::{RequiredMethod, ProvidedMethod, TypeImplItem, TypeTraitItem};
use ast::{UnboxedClosureKind}; use ast::{UnboxedClosureKind};
use ast;
use ast_util; use ast_util;
use owned_slice::OwnedSlice; use owned_slice::OwnedSlice;
use attr::{AttrMetaMethods, AttributeMethods}; use attr::{AttrMetaMethods, AttributeMethods};
use codemap::{CodeMap, BytePos}; use codemap::{mod, CodeMap, BytePos};
use codemap;
use diagnostic; use diagnostic;
use parse::token::{BinOpToken, Token}; use parse::token::{mod, BinOpToken, Token};
use parse::token;
use parse::lexer::comments; use parse::lexer::comments;
use parse; use parse;
use print::pp::{break_offset, word, space, zerobreak, hardbreak}; use print::pp::{mod, break_offset, word, space, zerobreak, hardbreak};
use print::pp::{Breaks, Consistent, Inconsistent, eof}; use print::pp::{Breaks, Consistent, Inconsistent, eof};
use print::pp;
use ptr::P; use ptr::P;
use std::ascii; use std::{ascii, mem};
use std::io::IoResult; use std::io::{mod, IoResult};
use std::io;
use std::mem;
pub enum AnnNode<'a> { pub enum AnnNode<'a> {
NodeIdent(&'a ast::Ident), NodeIdent(&'a ast::Ident),
@ -1734,29 +1728,15 @@ impl<'a> State<'a> {
try!(self.word_space("=")); try!(self.word_space("="));
try!(self.print_expr(&**rhs)); try!(self.print_expr(&**rhs));
} }
ast::ExprField(ref expr, id, ref tys) => { ast::ExprField(ref expr, id) => {
try!(self.print_expr(&**expr)); try!(self.print_expr(&**expr));
try!(word(&mut self.s, ".")); try!(word(&mut self.s, "."));
try!(self.print_ident(id.node)); try!(self.print_ident(id.node));
if tys.len() > 0u {
try!(word(&mut self.s, "::<"));
try!(self.commasep(
Inconsistent, tys.as_slice(),
|s, ty| s.print_type(&**ty)));
try!(word(&mut self.s, ">"));
}
} }
ast::ExprTupField(ref expr, id, ref tys) => { ast::ExprTupField(ref expr, id) => {
try!(self.print_expr(&**expr)); try!(self.print_expr(&**expr));
try!(word(&mut self.s, ".")); try!(word(&mut self.s, "."));
try!(self.print_uint(id.node)); try!(self.print_uint(id.node));
if tys.len() > 0u {
try!(word(&mut self.s, "::<"));
try!(self.commasep(
Inconsistent, tys.as_slice(),
|s, ty| s.print_type(&**ty)));
try!(word(&mut self.s, ">"));
}
} }
ast::ExprIndex(ref expr, ref index) => { ast::ExprIndex(ref expr, ref index) => {
try!(self.print_expr(&**expr)); try!(self.print_expr(&**expr));
@ -2164,21 +2144,22 @@ impl<'a> State<'a> {
try!(self.print_pat(&**p)); try!(self.print_pat(&**p));
} }
try!(space(&mut self.s)); try!(space(&mut self.s));
match arm.guard { if let Some(ref e) = arm.guard {
Some(ref e) => { try!(self.word_space("if"));
try!(self.word_space("if")); try!(self.print_expr(&**e));
try!(self.print_expr(&**e)); try!(space(&mut self.s));
try!(space(&mut self.s));
}
None => ()
} }
try!(self.word_space("=>")); try!(self.word_space("=>"));
match arm.body.node { match arm.body.node {
ast::ExprBlock(ref blk) => { ast::ExprBlock(ref blk) => {
// the block will close the pattern's ibox // the block will close the pattern's ibox
try!(self.print_block_unclosed_indent(&**blk, try!(self.print_block_unclosed_indent(&**blk, indent_unit));
indent_unit));
// If it is a user-provided unsafe block, print a comma after it
if let ast::UnsafeBlock(ast::UserProvided) = blk.rules {
try!(word(&mut self.s, ","));
}
} }
_ => { _ => {
try!(self.end()); // close the ibox for the pattern try!(self.end()); // close the ibox for the pattern


@ -838,17 +838,11 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) {
visitor.visit_expr(&**right_expression); visitor.visit_expr(&**right_expression);
visitor.visit_expr(&**left_expression) visitor.visit_expr(&**left_expression)
} }
ExprField(ref subexpression, _, ref types) => { ExprField(ref subexpression, _) => {
visitor.visit_expr(&**subexpression); visitor.visit_expr(&**subexpression);
for typ in types.iter() {
visitor.visit_ty(&**typ)
}
} }
ExprTupField(ref subexpression, _, ref types) => { ExprTupField(ref subexpression, _) => {
visitor.visit_expr(&**subexpression); visitor.visit_expr(&**subexpression);
for typ in types.iter() {
visitor.visit_ty(&**typ)
}
} }
ExprIndex(ref main_expression, ref index_expression) => { ExprIndex(ref main_expression, ref index_expression) => {
visitor.visit_expr(&**main_expression); visitor.visit_expr(&**main_expression);


@ -533,9 +533,8 @@ fn format(val: Param, op: FormatOp, flags: Flags) -> Result<Vec<u8> ,String> {
FormatHEX => { FormatHEX => {
s = s.as_slice() s = s.as_slice()
.to_ascii() .to_ascii()
.to_uppercase() .iter()
.into_bytes() .map(|b| b.to_uppercase().as_byte())
.into_iter()
.collect(); .collect();
if flags.alternate { if flags.alternate {
let s_ = replace(&mut s, vec!(b'0', b'X')); let s_ = replace(&mut s, vec!(b'0', b'X'));
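The FormatHEX branch now uppercases byte-by-byte through the new `Ascii` accessors instead of round-tripping through `into_bytes`; a small sketch of the same iterator pattern on a hypothetical input:

use std::ascii::AsciiCast;

fn main() {
    let s = "ff";
    // Uppercase each ASCII byte and collect back into a byte vector.
    let upper: Vec<u8> = s.to_ascii()
                          .iter()
                          .map(|b| b.to_uppercase().as_byte())
                          .collect();
    assert_eq!(upper, vec!(b'F', b'F'));
}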


@ -1,3 +1,12 @@
S 2014-11-21 c9f6d69
freebsd-x86_64 0ef316e7c369177de043e69e964418bd637cbfc0
linux-i386 c8342e762a1720be939ed7c6a39bdaa27892f66f
linux-x86_64 7a7fe6f5ed47b9cc66261f880e166c7c8738b73e
macos-i386 63e8644512bd5665c14389a83d5af564c7c0b103
macos-x86_64 7933ae0e974d1b897806138b7052cb2b4514585f
winnt-i386 94f5e2974e6120945c909753010d73b53cd6ff90
winnt-x86_64 905ffbdd94580854b01dc4e27fdad7e7c8ae18fe
S 2014-11-18 9c96a79 S 2014-11-18 9c96a79
freebsd-x86_64 22c93a289bdbc886af882b5bb76bfa673d46aa4f freebsd-x86_64 22c93a289bdbc886af882b5bb76bfa673d46aa4f
linux-i386 999ba4a0dfb70adca628138a7d5f491023621140 linux-i386 999ba4a0dfb70adca628138a7d5f491023621140


@ -0,0 +1,16 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(tuple_indexing)]
fn main() {
let t = (42i, 42i);
t.0::<int>; //~ ERROR expected one of `;`, `}`, found `::`
}


@ -30,7 +30,6 @@ pub trait PartialEq for Sized? {
fn eq(&self, other: &Self) -> bool; fn eq(&self, other: &Self) -> bool;
} }
#[cfg(not(stage0))] // NOTE(stage0): remove cfg after a snapshot
#[unstable = "Trait is unstable."] #[unstable = "Trait is unstable."]
impl<'a, Sized? T: PartialEq> PartialEq for &'a T { impl<'a, Sized? T: PartialEq> PartialEq for &'a T {
#[inline] #[inline]


@ -0,0 +1,20 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//
// Testing that unsafe blocks in match arms are followed by a comma
// pp-exact
fn main() {
match true {
true if true => (),
false if false => unsafe { },
true => { }
false => (),
}
}


@ -8,6 +8,8 @@
// option. This file may not be copied, modified, or distributed // option. This file may not be copied, modified, or distributed
// except according to those terms. // except according to those terms.
// ignore-pretty
//
// exec-env:RUST_MIN_STACK=16000000 // exec-env:RUST_MIN_STACK=16000000
// //
// Big stack is needed for pretty printing, a little sad... // Big stack is needed for pretty printing, a little sad...