std: Deprecate a number of unstable features
Many of these have long since become obsolete, so this commit starts the removal process for all of them. The unstable features that were deprecated are:

* cmp_partial
* fs_time
* hash_default
* int_slice
* iter_min_max
* iter_reset_fuse
* iter_to_vec
* map_in_place
* move_from
* owned_ascii_ext
* page_size
* read_and_zero
* scan_state
* slice_chars
* slice_position_elem
* subslice_offset
parent a5c12f4e39
commit b3aa1a6d4a
27 changed files with 80 additions and 33 deletions
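The diff below applies the same treatment to every feature in the list: the existing #[unstable(...)] attribute keeps the feature gate, a #[deprecated(since = "1.3.0", reason = ...)] attribute is layered on top of it, and in-tree callers are either rewritten against stable replacements or annotated with #[allow(deprecated)]. A rough sketch of that shape outside the staged std API, where the stable form of the attribute takes `note` rather than `reason` (the module and function names below are made up for illustration):

    mod demo {
        // Hypothetical deprecated helper, standing in for the std items below.
        #[deprecated(since = "1.3.0", note = "call slice::len directly")]
        pub fn old_len<T>(v: &[T]) -> usize {
            v.len()
        }
    }

    // Silence the warning at the call site, as the in-tree callers in this diff do.
    #[allow(deprecated)]
    fn main() {
        assert_eq!(demo::old_len(&[1, 2, 3]), 3);
    }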
@@ -86,10 +86,13 @@ use core::raw::{TraitObject}
 #[lang = "exchange_heap"]
 #[unstable(feature = "box_heap",
            reason = "may be renamed; uncertain about custom allocator design")]
+#[allow(deprecated)]
 pub const HEAP: ExchangeHeapSingleton =
     ExchangeHeapSingleton { _force_singleton: () };
 
 /// This the singleton type used solely for `boxed::HEAP`.
+#[unstable(feature = "box_heap",
+           reason = "may be renamed; uncertain about custom allocator design")]
 #[derive(Copy, Clone)]
 pub struct ExchangeHeapSingleton { _force_singleton: () }
 
@@ -762,12 +762,16 @@ impl<T> [T] {
 
     /// Find the first index containing a matching value.
     #[unstable(feature = "slice_position_elem")]
+    #[deprecated(since = "1.3.0",
+                 reason = "less idiomatic than .iter().position()")]
     pub fn position_elem(&self, t: &T) -> Option<usize> where T: PartialEq {
         core_slice::SliceExt::position_elem(self, t)
     }
 
     /// Find the last index containing a matching value.
     #[unstable(feature = "slice_position_elem")]
+    #[deprecated(since = "1.3.0",
+                 reason = "less idiomatic than .iter().rev().position()")]
     pub fn rposition_elem(&self, t: &T) -> Option<usize> where T: PartialEq {
         core_slice::SliceExt::rposition_elem(self, t)
     }
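Note: the deprecation messages above name the replacements directly. A hedged sketch of the migration for both methods, with made-up slice contents; the rposition form is what this commit's own LineWriter change later in the diff switches to:

    fn main() {
        let v = [10, 20, 30, 20];

        // v.position_elem(&20) becomes a by-value search over the iterator.
        let first = v.iter().position(|x| *x == 20);
        // v.rposition_elem(&20): rposition searches from the back but still
        // reports the index from the front, matching the old behaviour.
        let last = v.iter().rposition(|x| *x == 20);

        assert_eq!(first, Some(1));
        assert_eq!(last, Some(3));
    }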
@@ -1009,6 +1013,8 @@ impl<T> [T] {
     /// ```
     #[unstable(feature = "move_from",
                reason = "uncertain about this API approach")]
+    #[deprecated(since = "1.3.0",
+                 reason = "unclear that it must belong in the standard library")]
     #[inline]
     pub fn move_from(&mut self, mut src: Vec<T>, start: usize, end: usize) -> usize {
         for (a, b) in self.iter_mut().zip(&mut src[start .. end]) {
@@ -552,6 +552,9 @@ impl str {
     /// ```
     #[unstable(feature = "slice_chars",
                reason = "may have yet to prove its worth")]
+    #[deprecated(since = "1.3.0",
+                 reason = "can be implemented with char_indices and \
+                           hasn't seen enough use to justify inclusion")]
     pub fn slice_chars(&self, begin: usize, end: usize) -> &str {
         core_str::StrExt::slice_chars(self, begin, end)
     }
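Note: as the deprecation reason says, char_indices is enough to rebuild character-range slicing in caller code. A rough, illustrative stand-in (not the std implementation, and with only loose bounds checking):

    // Slice a &str by character positions rather than byte positions.
    fn slice_chars(s: &str, begin: usize, end: usize) -> &str {
        // Byte offsets of every char boundary, including the end of the string.
        let mut bounds = s.char_indices().map(|(i, _)| i).chain(Some(s.len()));
        let start = bounds.by_ref().nth(begin).expect("begin out of range");
        let stop = if end > begin {
            bounds.nth(end - begin - 1).expect("end out of range")
        } else {
            start
        };
        &s[start..stop]
    }

    fn main() {
        assert_eq!(slice_chars("Löwe 老虎", 5, 7), "老虎");
    }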
@@ -1642,6 +1645,8 @@ impl str {
     /// ```
     #[unstable(feature = "subslice_offset",
                reason = "awaiting convention about comparability of arbitrary slices")]
+    #[deprecated(since = "1.3.0",
+                 reason = "replaced with other pattern-related methods")]
     pub fn subslice_offset(&self, inner: &str) -> usize {
         core_str::StrExt::subslice_offset(self, inner)
     }
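Note: subslice_offset reported the byte offset of a sub-slice borrowed out of the same string. The rustdoc change later in this diff falls back to str::find, which matches by value and therefore returns the first occurrence rather than the borrowed one; pointer arithmetic recovers the original behaviour. A sketch of both, on made-up input:

    fn main() {
        let s = "a\nb\nb";
        let line = s.lines().nth(2).unwrap(); // the second "b", starting at byte 4

        // What the in-tree caller switches to: search by value. This finds the
        // first "b" at byte 2, which is not the slice that was borrowed.
        let by_value = s.find(line).unwrap();

        // Address arithmetic gives the offset of the borrowed slice itself.
        let by_address = line.as_ptr() as usize - s.as_ptr() as usize;

        assert_eq!(by_value, 2);
        assert_eq!(by_address, 4);
    }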
@@ -772,6 +772,9 @@ impl<T> Vec<T> {
     /// ```
     #[unstable(feature = "map_in_place",
                reason = "API may change to provide stronger guarantees")]
+    #[deprecated(since = "1.3.0",
+                 reason = "unclear that the API is strong enough and did \
+                           not proven itself")]
     pub fn map_in_place<U, F>(self, mut f: F) -> Vec<U> where F: FnMut(T) -> U {
         // FIXME: Assert statically that the types `T` and `U` have the same
         // size.
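Note: the in-tree callers of map_in_place in this diff move to the plain iterator pipeline, which allocates a new vector instead of reusing the old buffer; that buffer-reuse guarantee was the point of the API, and the deprecation reason above records that it never proved strong enough. A sketch with made-up values:

    fn main() {
        let positions = vec![10u32, 20, 30];
        let start = 10u32;

        // Formerly: positions.map_in_place(|pos| pos - start)
        let adjusted: Vec<u32> = positions.into_iter()
                                          .map(|pos| pos - start)
                                          .collect();

        assert_eq!(adjusted, [0, 10, 20]);
    }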
@@ -1627,6 +1630,7 @@ impl<T> IntoIter<T> {
     #[inline]
     /// Drops all items that have not yet been moved and returns the empty vector.
     #[unstable(feature = "iter_to_vec")]
+    #[deprecated(since = "1.3.0", reason = "replaced by drain()")]
     pub fn into_inner(mut self) -> Vec<T> {
         unsafe {
             for _x in self.by_ref() { }
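Note: the deprecation message points at drain(). A sketch of the same "stop early, keep the allocation" idea with Vec::drain, assuming a current toolchain where the range form of drain is stable:

    fn main() {
        let mut v = vec![1, 2, 3, 4];

        // Formerly: let mut it = v.into_iter(); ...; let empty = it.into_inner();
        // With drain(..) the vector itself survives the iteration, so there is
        // no separate "get the Vec back" step.
        {
            let mut it = v.drain(..);
            assert_eq!(it.next(), Some(1));
            assert_eq!(it.next(), Some(2));
        } // dropping the Drain discards the remaining elements

        assert!(v.is_empty());
        assert!(v.capacity() >= 4); // the allocation is retained
    }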
@@ -401,6 +401,7 @@ pub fn max<T: Ord>(v1: T, v2: T) -> T {
 /// ```
 #[inline]
 #[unstable(feature = "cmp_partial")]
+#[deprecated(since = "1.3.0", reason = "has not proven itself worthwhile")]
 pub fn partial_min<T: PartialOrd>(v1: T, v2: T) -> Option<T> {
     match v1.partial_cmp(&v2) {
         Some(Less) | Some(Equal) => Some(v1),
@@ -434,6 +435,7 @@ pub fn partial_min<T: PartialOrd>(v1: T, v2: T) -> Option<T> {
 /// ```
 #[inline]
 #[unstable(feature = "cmp_partial")]
+#[deprecated(since = "1.3.0", reason = "has not proven itself worthwhile")]
 pub fn partial_max<T: PartialOrd>(v1: T, v2: T) -> Option<T> {
     match v1.partial_cmp(&v2) {
         Some(Equal) | Some(Less) => Some(v2),
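Note: with cmp::partial_min and partial_max deprecated, the same selection can be written directly against partial_cmp; the function below just fills in the remaining arms of the match shown above:

    use std::cmp::Ordering::{Equal, Greater, Less};

    // Stand-in for the deprecated cmp::partial_min.
    fn partial_min<T: PartialOrd>(v1: T, v2: T) -> Option<T> {
        match v1.partial_cmp(&v2) {
            Some(Less) | Some(Equal) => Some(v1),
            Some(Greater) => Some(v2),
            None => None,
        }
    }

    fn main() {
        assert_eq!(partial_min(1.0_f64, 2.0), Some(1.0));
        assert_eq!(partial_min(f64::NAN, 2.0), None); // incomparable inputs
    }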
@@ -171,6 +171,8 @@ pub trait Hasher {
 #[unstable(feature = "hash_default",
            reason = "not the most ergonomic interface unless `H` is defaulted \
                      to SipHasher, but perhaps not ready to commit to that")]
+#[deprecated(since = "1.3.0",
+             reason = "has yet to prove itself useful")]
 pub fn hash<T: Hash, H: Hasher + Default>(value: &T) -> u64 {
     let mut h: H = Default::default();
     value.hash(&mut h);
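Note: the deprecated free function only bundled "build a default hasher, feed the value, call finish()". A later hunk in this diff (maybe_find_item) inlines exactly that with SipHasher::new_with_keys; the sketch below does the same with DefaultHasher, since SipHasher itself has since been deprecated:

    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    // Equivalent of the deprecated hash::hash::<T, H>(&value).
    fn hash_one<T: Hash>(value: &T) -> u64 {
        let mut h = DefaultHasher::new();
        value.hash(&mut h);
        h.finish()
    }

    fn main() {
        assert_eq!(hash_one(&42u32), hash_one(&42u32));
        println!("{}", hash_one(&"hello"));
    }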
@@ -56,6 +56,7 @@
 
 #![stable(feature = "rust1", since = "1.0.0")]
 
+#[allow(deprecated)]
 use self::MinMaxResult::*;
 
 use clone::Clone;
@@ -445,6 +446,7 @@ pub trait Iterator {
     /// ```
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
+    #[allow(deprecated)]
     fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
         where Self: Sized, F: FnMut(&mut St, Self::Item) -> Option<B>,
     {
@@ -840,6 +842,8 @@ pub trait Iterator {
     #[unstable(feature = "iter_min_max",
                reason = "return type may change or may wish to have a closure \
                          based version as well")]
+    #[deprecated(since = "1.3.0", reason = "has not proven itself")]
+    #[allow(deprecated)]
     fn min_max(mut self) -> MinMaxResult<Self::Item> where Self: Sized, Self::Item: Ord
     {
         let (mut min, mut max) = match self.next() {
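Note: without Iterator::min_max, both extremes can still be taken in a single pass with a fold. This plain version makes no attempt at the pairwise-comparison trick the deprecated method used to save comparisons, and it folds the OneElement case into an (x, x) pair:

    fn main() {
        let xs = [3, 7, 1, 9, 4];

        let min_max = xs.iter().fold(None, |acc, &x| match acc {
            None => Some((x, x)),
            Some((lo, hi)) => Some((lo.min(x), hi.max(x))),
        });

        assert_eq!(min_max, Some((1, 9)));
    }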
@@ -1336,6 +1340,8 @@ impl<I> RandomAccessIterator for Rev<I>
 #[derive(Clone, PartialEq, Debug)]
 #[unstable(feature = "iter_min_max",
            reason = "unclear whether such a fine-grained result is widely useful")]
+#[deprecated(since = "1.3.0", reason = "has not proven itself")]
+#[allow(deprecated)]
 pub enum MinMaxResult<T> {
     /// Empty iterator
     NoElements,
@@ -1349,6 +1355,8 @@ pub enum MinMaxResult<T> {
 }
 
 #[unstable(feature = "iter_min_max", reason = "type is unstable")]
+#[deprecated(since = "1.3.0", reason = "has not proven itself")]
+#[allow(deprecated)]
 impl<T: Clone> MinMaxResult<T> {
     /// `into_option` creates an `Option` of type `(T,T)`. The returned `Option`
     /// has variant `None` if and only if the `MinMaxResult` has variant
@@ -2249,6 +2257,7 @@ impl<I> ExactSizeIterator for Take<I> where I: ExactSizeIterator {}
 #[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
 #[stable(feature = "rust1", since = "1.0.0")]
 #[derive(Clone)]
+#[allow(deprecated)]
 pub struct Scan<I, St, F> {
     iter: I,
     f: F,
@@ -2256,6 +2265,7 @@ pub struct Scan<I, St, F> {
     /// The current internal state to be passed to the closure next.
     #[unstable(feature = "scan_state",
                reason = "public fields are otherwise rare in the stdlib")]
+    #[deprecated(since = "1.3.0", reason = "unclear whether this is necessary")]
     pub state: St,
 }
 
@@ -2267,6 +2277,7 @@ impl<B, I, St, F> Iterator for Scan<I, St, F> where
     type Item = B;
 
     #[inline]
+    #[allow(deprecated)]
     fn next(&mut self) -> Option<B> {
         self.iter.next().and_then(|a| (self.f)(&mut self.state, a))
     }
@@ -2448,6 +2459,8 @@ impl<I> Fuse<I> {
     /// previously returned `None`.
     #[inline]
     #[unstable(feature = "iter_reset_fuse", reason = "seems marginal")]
+    #[deprecated(since = "1.3.0",
+                 reason = "unusual for adaptors to have one-off methods")]
     pub fn reset_fuse(&mut self) {
         self.done = false
     }
@@ -131,6 +131,9 @@ pub unsafe fn read<T>(src: *const T) -> T {
 #[inline(always)]
 #[unstable(feature = "read_and_zero",
            reason = "may play a larger role in std::ptr future extensions")]
+#[deprecated(since = "1.3.0",
+             reason = "a \"zero value\" will soon not actually exist for all \
+                       types once dynamic drop has been implemented")]
 pub unsafe fn read_and_zero<T>(dest: *mut T) -> T {
     // Copy the data out from `dest`:
     let tmp = read(&*dest);
@@ -37,13 +37,11 @@
 #![feature(dynamic_lib)]
 #![feature(enumset)]
 #![feature(fs_canonicalize)]
-#![feature(hash_default)]
 #![feature(hashmap_hasher)]
 #![feature(into_cow)]
 #![feature(iter_cmp)]
 #![feature(iter_arith)]
 #![feature(libc)]
-#![feature(map_in_place)]
 #![feature(num_bits_bytes)]
 #![feature(path_ext)]
 #![feature(quote)]
@@ -55,8 +53,6 @@
 #![feature(slice_bytes)]
 #![feature(slice_splits)]
 #![feature(slice_patterns)]
-#![feature(slice_position_elem)]
-#![feature(slice_concat_ext)]
 #![feature(staged_api)]
 #![feature(str_char)]
 #![feature(str_match_indices)]
@@ -660,14 +660,14 @@ pub fn import_codemap(local_codemap: &codemap::CodeMap,
     // `CodeMap::new_imported_filemap()` will then translate those
     // coordinates to their new global frame of reference when the
     // offset of the FileMap is known.
-    let lines = lines.into_inner().map_in_place(|pos| pos - start_pos);
-    let multibyte_chars = multibyte_chars
-        .into_inner()
-        .map_in_place(|mbc|
-            codemap::MultiByteChar {
-                pos: mbc.pos - start_pos,
-                bytes: mbc.bytes
-            });
+    let mut lines = lines.into_inner();
+    for pos in &mut lines {
+        *pos = *pos - start_pos;
+    }
+    let mut multibyte_chars = multibyte_chars.into_inner();
+    for mbc in &mut multibyte_chars {
+        mbc.pos = mbc.pos - start_pos;
+    }
 
     let local_version = local_codemap.new_imported_filemap(name,
                                                            source_length,
@@ -197,7 +197,9 @@ impl CStore {
             }))
             .collect::<Vec<_>>();
         libs.sort_by(|&(a, _), &(b, _)| {
-            ordering.position_elem(&a).cmp(&ordering.position_elem(&b))
+            let a = ordering.iter().position(|x| *x == a);
+            let b = ordering.iter().position(|x| *x == b);
+            a.cmp(&b)
         });
         libs
     }
@@ -35,7 +35,7 @@ use util::nodemap::FnvHashMap;
 
 use std::cell::{Cell, RefCell};
 use std::collections::HashMap;
-use std::hash::{self, Hash, SipHasher};
+use std::hash::{Hash, SipHasher, Hasher};
 use std::io::prelude::*;
 use std::io;
 use std::rc::Rc;
@@ -89,9 +89,9 @@ pub fn maybe_find_item<'a>(item_id: ast::NodeId,
     fn eq_item(bytes: &[u8], item_id: ast::NodeId) -> bool {
         u32_from_be_bytes(bytes) == item_id
     }
-    lookup_hash(items,
-                |a| eq_item(a, item_id),
-                hash::hash::<i64, SipHasher>(&(item_id as i64)))
+    let mut s = SipHasher::new_with_keys(0, 0);
+    (item_id as i64).hash(&mut s);
+    lookup_hash(items, |a| eq_item(a, item_id), s.finish())
 }
 
 fn find_item<'a>(item_id: ast::NodeId, items: rbml::Doc<'a>) -> rbml::Doc<'a> {
@@ -79,7 +79,6 @@ use std::ops;
 use std::rc::Rc;
 use std::vec::IntoIter;
 use collections::enum_set::{self, EnumSet, CLike};
-use collections::slice::SliceConcatExt;
 use std::collections::{HashMap, HashSet};
 use syntax::abi;
 use syntax::ast::{CrateNum, DefId, ItemImpl, ItemTrait, LOCAL_CRATE};
@@ -124,6 +124,7 @@ fn get_working_set_size() -> Option<usize> {
 }
 
 #[cfg_attr(windows, allow(dead_code))]
+#[allow(deprecated)]
 fn get_proc_self_statm_field(field: usize) -> Option<usize> {
     use std::fs::File;
     use std::io::Read;
@@ -279,7 +279,7 @@ impl<'tcx> fmt::Display for ty::TraitTy<'tcx> {
                            .expect("could not lift TraitRef for printing");
         let projections = tcx.lift(&bounds.projection_bounds[..])
                              .expect("could not lift projections for printing");
-        let projections = projections.map_in_place(|p| p.0);
+        let projections = projections.into_iter().map(|p| p.0).collect();
 
         let tap = ty::Binder(TraitAndProjections(principal, projections));
         in_binder(f, tcx, &ty::Binder(""), Some(tap))
@@ -34,7 +34,7 @@
 //! both occur before the crate is rendered.
 pub use self::ExternalLocation::*;
 
-use std::ascii::OwnedAsciiExt;
+use std::ascii::AsciiExt;
 use std::cell::RefCell;
 use std::cmp::Ordering;
 use std::collections::{BTreeMap, HashMap, HashSet};
@@ -2547,7 +2547,7 @@ fn get_index_search_type(item: &clean::Item,
 
     // Consider `self` an argument as well.
    if let Some(name) = parent {
-        inputs.push(Type { name: Some(name.into_ascii_lowercase()) });
+        inputs.push(Type { name: Some(name.to_ascii_lowercase()) });
     }
 
     inputs.extend(&mut decl.inputs.values.iter().map(|arg| {
@@ -2563,7 +2563,7 @@ fn get_index_search_type(item: &clean::Item,
 }
 
 fn get_index_type(clean_type: &clean::Type) -> Type {
-    Type { name: get_index_type_name(clean_type).map(|s| s.into_ascii_lowercase()) }
+    Type { name: get_index_type_name(clean_type).map(|s| s.to_ascii_lowercase()) }
 }
 
 fn get_index_type_name(clean_type: &clean::Type) -> Option<String> {
@@ -24,14 +24,12 @@
 #![feature(box_syntax)]
 #![feature(dynamic_lib)]
 #![feature(libc)]
-#![feature(owned_ascii_ext)]
 #![feature(path_ext)]
 #![feature(path_relative_from)]
 #![feature(rustc_private)]
 #![feature(set_stdio)]
 #![feature(slice_patterns)]
 #![feature(staged_api)]
-#![feature(subslice_offset)]
 #![feature(test)]
 #![feature(unicode)]
 #![feature(vec_push_all)]
@@ -34,7 +34,7 @@ fn extract_leading_metadata<'a>(s: &'a str) -> (Vec<&'a str>, &'a str) {
             // remove %<whitespace>
             metadata.push(line[1..].trim_left())
         } else {
-            let line_start_byte = s.subslice_offset(line);
+            let line_start_byte = s.find(line).unwrap();
            return (metadata, &s[line_start_byte..]);
         }
     }
@@ -20,6 +20,9 @@ use mem;
 /// Extension methods for ASCII-subset only operations on owned strings
 #[unstable(feature = "owned_ascii_ext",
            reason = "would prefer to do this in a more general way")]
+#[deprecated(since = "1.3.0",
+             reason = "hasn't yet proved essential to be in the standard library")]
+#[allow(deprecated)]
 pub trait OwnedAsciiExt {
     /// Converts the string to ASCII upper case:
     /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
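Note: the in-tree users of OwnedAsciiExt in this diff switch to AsciiExt::to_ascii_lowercase/to_ascii_uppercase, which allocate a fresh value instead of consuming self. On a current toolchain the in-place methods make_ascii_lowercase/make_ascii_uppercase (added later) cover the consuming use case; a sketch:

    fn main() {
        let name = String::from("SipHasher");

        // What this commit's callers switch to: allocate a lowered copy.
        let copied = name.to_ascii_lowercase();

        // In-place lowering, the closest current equivalent of the consuming
        // into_ascii_lowercase.
        let mut owned = name;
        owned.make_ascii_lowercase();

        assert_eq!(copied, "siphasher");
        assert_eq!(owned, "siphasher");
    }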
@@ -164,11 +167,13 @@ impl AsciiExt for str {
     }
 
     #[inline]
+    #[allow(deprecated)]
     fn to_ascii_uppercase(&self) -> String {
         self.to_string().into_ascii_uppercase()
     }
 
     #[inline]
+    #[allow(deprecated)]
     fn to_ascii_lowercase(&self) -> String {
         self.to_string().into_ascii_lowercase()
     }
@@ -189,6 +194,7 @@ impl AsciiExt for str {
     }
 }
 
+#[allow(deprecated)]
 impl OwnedAsciiExt for String {
     #[inline]
     fn into_ascii_uppercase(self) -> String {
@@ -212,11 +218,13 @@ impl AsciiExt for [u8] {
     }
 
     #[inline]
+    #[allow(deprecated)]
     fn to_ascii_uppercase(&self) -> Vec<u8> {
         self.to_vec().into_ascii_uppercase()
     }
 
     #[inline]
+    #[allow(deprecated)]
     fn to_ascii_lowercase(&self) -> Vec<u8> {
         self.to_vec().into_ascii_lowercase()
     }
@@ -242,6 +250,7 @@ impl AsciiExt for [u8] {
     }
 }
 
+#[allow(deprecated)]
 impl OwnedAsciiExt for Vec<u8> {
     #[inline]
     fn into_ascii_uppercase(mut self) -> Vec<u8> {
@@ -590,6 +590,8 @@ impl ExactSizeIterator for ArgsOs {
 
 /// Returns the page size of the current architecture in bytes.
 #[unstable(feature = "page_size", reason = "naming and/or location may change")]
+#[deprecated(since = "1.3.0",
+             reason = "hasn't seen enough usage to justify inclusion")]
 pub fn page_size() -> usize {
     os_imp::page_size()
 }
@@ -1225,6 +1225,9 @@ impl PathExt for Path {
            reason = "the argument type of u64 is not quite appropriate for \
                      this function and may change if the standard library \
                      gains a type to represent a moment in time")]
+#[deprecated(since = "1.3.0",
+             reason = "will never be stabilized as-is and its replacement will \
+                       likely have a totally new API")]
 pub fn set_file_times<P: AsRef<Path>>(path: P, accessed: u64,
                                       modified: u64) -> io::Result<()> {
     fs_imp::utimes(path.as_ref(), accessed, modified)
@@ -747,7 +747,7 @@ impl<W: Write> LineWriter<W> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<W: Write> Write for LineWriter<W> {
     fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
-        match buf.rposition_elem(&b'\n') {
+        match buf.iter().rposition(|b| *b == b'\n') {
             Some(i) => {
                 let n = try!(self.inner.write(&buf[..i + 1]));
                 if n != i + 1 { return Ok(n) }
@@ -1105,7 +1105,7 @@ fn read_until<R: BufRead + ?Sized>(r: &mut R, delim: u8, buf: &mut Vec<u8>)
             Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,
             Err(e) => return Err(e)
         };
-        match available.position_elem(&delim) {
+        match available.iter().position(|x| *x == delim) {
             Some(i) => {
                 buf.push_all(&available[..i + 1]);
                 (true, i + 1)
@@ -232,7 +232,6 @@
 #![feature(linkage, thread_local, asm)]
 #![feature(macro_reexport)]
 #![feature(slice_concat_ext)]
-#![feature(slice_position_elem)]
 #![feature(no_std)]
 #![feature(oom)]
 #![feature(optin_builtin_traits)]
@@ -200,7 +200,7 @@ mod platform {
                 return Some(VerbatimUNC(server, share));
             } else {
                 // \\?\path
-                let idx = path.position_elem(&b'\\');
+                let idx = path.iter().position(|&b| b == b'\\');
                 if idx == Some(2) && path[1] == b':' {
                     let c = path[0];
                     if c.is_ascii() && (c as char).is_alphabetic() {
@@ -214,7 +214,8 @@ mod platform {
             } else if path.starts_with(b".\\") {
                 // \\.\path
                 path = &path[2..];
-                let slice = &path[.. path.position_elem(&b'\\').unwrap_or(path.len())];
+                let pos = path.iter().position(|&b| b == b'\\');
+                let slice = &path[..pos.unwrap_or(path.len())];
                 return Some(DeviceNS(u8_slice_as_os_str(slice)));
             }
             match parse_two_comps(path, is_sep_byte) {
@@ -56,7 +56,6 @@
 #![deny(missing_docs)]
 
 #![feature(box_syntax)]
-#![feature(owned_ascii_ext)]
 #![feature(path_ext)]
 #![feature(rustc_private)]
 #![feature(staged_api)]
@@ -14,7 +14,7 @@ pub use self::Param::*;
 use self::States::*;
 use self::FormatState::*;
 use self::FormatOp::*;
-use std::ascii::OwnedAsciiExt;
+use std::ascii::AsciiExt;
 use std::mem::replace;
 use std::iter::repeat;
 
@@ -532,7 +532,7 @@ fn format(val: Param, op: FormatOp, flags: Flags) -> Result<Vec<u8> ,String> {
             }
         }
         FormatHEX => {
-            s = s.into_ascii_uppercase();
+            s = s.to_ascii_uppercase();
             if flags.alternate {
                 let s_ = replace(&mut s, vec!(b'0', b'X'));
                 s.extend(s_);