Auto merge of #69666 - JohnTitor:rollup-6nt3op0, r=JohnTitor
Rollup of 9 pull requests

Successful merges:

 - #69213 (Improve documentation on iterators length)
 - #69609 (Remove `usable_size` APIs)
 - #69619 (more cleanups)
 - #69620 (doc(librustc_error_codes): add long error explanation for E0719)
 - #69626 (Toolstate: don't duplicate nightly tool list.)
 - #69628 (Fix a leak in `DiagnosticBuilder::into_diagnostic`.)
 - #69633 (Update my mailmap entry)
 - #69634 (clean up E0378 explanation)
 - #69637 (Don't convert Results to Options just for matching.)

Failed merges:

r? @ghost
commit a5de254862
37 changed files with 220 additions and 316 deletions
.mailmap (3 changed lines)
@@ -5,7 +5,6 @@
# email addresses.
#

Aaron Power <theaaronepower@gmail.com> Erin Power <xampprocky@gmail.com>
Aaron Todd <github@opprobrio.us>
Abhishek Chanda <abhishek.becs@gmail.com> Abhishek Chanda <abhishek@cloudscaling.com>
Adolfo Ochagavía <aochagavia92@gmail.com>
@@ -84,6 +83,8 @@ Eric Holk <eric.holk@gmail.com> <eholk@mozilla.com>
Eric Holmes <eric@ejholmes.net>
Eric Reed <ecreed@cs.washington.edu> <ereed@mozilla.com>
Erick Tryzelaar <erick.tryzelaar@gmail.com> <etryzelaar@iqt.org>
Erin Power <xampprocky@gmail.com> <theaaronepower@gmail.com>
Erin Power <xampprocky@gmail.com> <Aaronepower@users.noreply.github.com>
Esteban Küber <esteban@kuber.com.ar> <esteban@commure.com>
Esteban Küber <esteban@kuber.com.ar> <estebank@users.noreply.github.com>
Esteban Küber <esteban@kuber.com.ar> <github@kuber.com.ar>
@@ -443,7 +443,7 @@ fn change_toolstate(
if new_state != state {
eprintln!("The state of `{}` has changed from `{}` to `{}`", tool, state, new_state);
if new_state < state {
if !["rustc-guide", "miri", "embedded-book"].contains(&tool.as_str()) {
if !NIGHTLY_TOOLS.iter().any(|(name, _path)| name == tool) {
regressed = true;
}
}
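The toolstate hunk above drops a hardcoded tool list in favour of checking the single nightly-tool table. A standalone sketch of that membership check (the table contents here are illustrative placeholders, not the real `NIGHTLY_TOOLS` entries):

```rust
// Sketch only: a stand-in nightly-tool table and the `iter().any(...)`
// membership check the hunk switches to, so there is one source of truth.
const NIGHTLY_TOOLS: &[(&str, &str)] = &[
    ("miri", "src/tools/miri"),
    ("embedded-book", "src/doc/embedded-book"),
];

fn is_nightly_tool(tool: &str) -> bool {
    NIGHTLY_TOOLS.iter().any(|(name, _path)| *name == tool)
}

fn main() {
    assert!(is_nightly_tool("miri"));
    assert!(!is_nightly_tool("cargo"));
}
```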
@@ -165,8 +165,8 @@ pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
#[unstable(feature = "allocator_api", issue = "32838")]
unsafe impl AllocRef for Global {
#[inline]
unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
NonNull::new(alloc(layout)).ok_or(AllocErr)
unsafe fn alloc(&mut self, layout: Layout) -> Result<(NonNull<u8>, usize), AllocErr> {
NonNull::new(alloc(layout)).ok_or(AllocErr).map(|p| (p, layout.size()))
}

#[inline]
@@ -180,13 +180,13 @@ unsafe impl AllocRef for Global {
ptr: NonNull<u8>,
layout: Layout,
new_size: usize,
) -> Result<NonNull<u8>, AllocErr> {
NonNull::new(realloc(ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)
) -> Result<(NonNull<u8>, usize), AllocErr> {
NonNull::new(realloc(ptr.as_ptr(), layout, new_size)).ok_or(AllocErr).map(|p| (p, new_size))
}

#[inline]
unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
NonNull::new(alloc_zeroed(layout)).ok_or(AllocErr)
unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<(NonNull<u8>, usize), AllocErr> {
NonNull::new(alloc_zeroed(layout)).ok_or(AllocErr).map(|p| (p, layout.size()))
}
}

@@ -201,7 +201,7 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
} else {
let layout = Layout::from_size_align_unchecked(size, align);
match Global.alloc(layout) {
Ok(ptr) => ptr.as_ptr(),
Ok((ptr, _)) => ptr.as_ptr(),
Err(_) => handle_alloc_error(layout),
}
}
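With this change `alloc` hands back the pointer together with the size of the block it actually provided. A self-contained sketch of the caller-side pattern, using a stand-in helper rather than the nightly `AllocRef` trait itself:

```rust
use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
use std::ptr::NonNull;

// Stand-in for the new `alloc` shape: pointer plus usable size (here simply
// `layout.size()`, exactly as the `Global` impl above reports it).
unsafe fn alloc_with_size(layout: Layout) -> Option<(NonNull<u8>, usize)> {
    NonNull::new(alloc(layout)).map(|p| (p, layout.size()))
}

fn main() {
    let layout = Layout::from_size_align(64, 8).unwrap();
    unsafe {
        // Callers now destructure a tuple instead of receiving a bare pointer.
        let (ptr, usable) = alloc_with_size(layout).unwrap_or_else(|| handle_alloc_error(layout));
        assert!(usable >= layout.size());
        dealloc(ptr.as_ptr(), layout);
    }
}
```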
@@ -8,7 +8,7 @@ use test::Bencher;
fn allocate_zeroed() {
unsafe {
let layout = Layout::from_size_align(1024, 1).unwrap();
let ptr =
let (ptr, _) =
Global.alloc_zeroed(layout.clone()).unwrap_or_else(|_| handle_alloc_error(layout));

let mut i = ptr.cast::<u8>().as_ptr();
@@ -200,7 +200,7 @@ impl<T> Box<T> {
let ptr = if layout.size() == 0 {
NonNull::dangling()
} else {
Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).cast()
Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).0.cast()
};
Box::from_raw(ptr.as_ptr())
}
@@ -270,7 +270,7 @@ impl<T> Box<[T]> {
let ptr = if layout.size() == 0 {
NonNull::dangling()
} else {
Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).cast()
Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).0.cast()
};
Box::from_raw(slice::from_raw_parts_mut(ptr.as_ptr(), len))
}
@@ -72,7 +72,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
RawVec::allocate_in(capacity, true, a)
}

fn allocate_in(capacity: usize, zeroed: bool, mut a: A) -> Self {
fn allocate_in(mut capacity: usize, zeroed: bool, mut a: A) -> Self {
unsafe {
let elem_size = mem::size_of::<T>();
@@ -87,7 +87,10 @@ impl<T, A: AllocRef> RawVec<T, A> {
let layout = Layout::from_size_align(alloc_size, align).unwrap();
let result = if zeroed { a.alloc_zeroed(layout) } else { a.alloc(layout) };
match result {
Ok(ptr) => ptr.cast(),
Ok((ptr, size)) => {
capacity = size / elem_size;
ptr.cast()
}
Err(_) => handle_alloc_error(layout),
}
};
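The hunk above folds any excess the allocator grants back into the vector's capacity by dividing the granted size by the element size. The arithmetic in isolation:

```rust
// Sketch of the capacity arithmetic: whatever size the allocator actually
// grants is converted back into whole elements.
fn capacity_from_alloc(granted_bytes: usize, elem_size: usize) -> usize {
    granted_bytes / elem_size
}

fn main() {
    // Ask for 10 elements of 24 bytes (240 bytes); if the allocator rounds the
    // block up to 256 bytes, integer division still reports 10 usable elements,
    // so capacity is never overstated.
    assert_eq!(capacity_from_alloc(256, 24), 10);
    assert_eq!(capacity_from_alloc(240, 24), 10);
}
```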
@@ -280,7 +283,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
// 0, getting to here necessarily means the `RawVec` is overfull.
assert!(elem_size != 0, "capacity overflow");

let (new_cap, ptr) = match self.current_layout() {
let (ptr, new_cap) = match self.current_layout() {
Some(cur) => {
// Since we guarantee that we never allocate more than
// `isize::MAX` bytes, `elem_size * self.cap <= isize::MAX` as
@@ -297,7 +300,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
let ptr_res = self.a.realloc(NonNull::from(self.ptr).cast(), cur, new_size);
match ptr_res {
Ok(ptr) => (new_cap, ptr),
Ok((ptr, new_size)) => (ptr, new_size / elem_size),
Err(_) => handle_alloc_error(Layout::from_size_align_unchecked(
new_size,
cur.align(),
@@ -310,7 +313,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 };
let layout = Layout::array::<T>(new_cap).unwrap();
match self.a.alloc(layout) {
Ok(ptr) => (new_cap, ptr),
Ok((ptr, new_size)) => (ptr, new_size / elem_size),
Err(_) => handle_alloc_error(layout),
}
}
@@ -598,7 +601,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
let align = mem::align_of::<T>();
let old_layout = Layout::from_size_align_unchecked(old_size, align);
match self.a.realloc(NonNull::from(self.ptr).cast(), old_layout, new_size) {
Ok(p) => self.ptr = p.cast().into(),
Ok((ptr, _)) => self.ptr = ptr.cast().into(),
Err(_) => {
handle_alloc_error(Layout::from_size_align_unchecked(new_size, align))
}
@@ -631,6 +634,8 @@ impl<T, A: AllocRef> RawVec<T, A> {
fallibility: Fallibility,
strategy: ReserveStrategy,
) -> Result<(), TryReserveError> {
let elem_size = mem::size_of::<T>();

unsafe {
// NOTE: we don't early branch on ZSTs here because we want this
// to actually catch "asking for more than usize::MAX" in that case.
@@ -662,7 +667,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
None => self.a.alloc(new_layout),
};

let ptr = match (res, fallibility) {
let (ptr, new_cap) = match (res, fallibility) {
(Err(AllocErr), Infallible) => handle_alloc_error(new_layout),
(Err(AllocErr), Fallible) => {
return Err(TryReserveError::AllocError {
@@ -670,7 +675,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
non_exhaustive: (),
});
}
(Ok(ptr), _) => ptr,
(Ok((ptr, new_size)), _) => (ptr, new_size / elem_size),
};

self.ptr = ptr.cast().into();
@@ -20,7 +20,7 @@ fn allocator_param() {
fuel: usize,
}
unsafe impl AllocRef for BoundedAlloc {
unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
unsafe fn alloc(&mut self, layout: Layout) -> Result<(NonNull<u8>, usize), AllocErr> {
let size = layout.size();
if size > self.fuel {
return Err(AllocErr);
@@ -923,7 +923,7 @@ impl<T: ?Sized> Rc<T> {
let layout = Layout::new::<RcBox<()>>().extend(value_layout).unwrap().0.pad_to_align();

// Allocate for the layout.
let mem = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
let (mem, _) = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));

// Initialize the RcBox
let inner = mem_to_rcbox(mem.as_ptr());
@@ -784,7 +784,7 @@ impl<T: ?Sized> Arc<T> {
// reference (see #54908).
let layout = Layout::new::<ArcInner<()>>().extend(value_layout).unwrap().0.pad_to_align();

let mem = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
let (mem, _) = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));

// Initialize the ArcInner
let inner = mem_to_arcinner(mem.as_ptr());
@@ -20,7 +20,7 @@ fn check_overalign_requests<T: AllocRef>(mut allocator: T) {
unsafe {
let pointers: Vec<_> = (0..iterations)
.map(|_| {
allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap()
allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap().0
})
.collect();
for &ptr in &pointers {
@@ -11,12 +11,6 @@ use crate::num::NonZeroUsize;
use crate::ptr::{self, NonNull};
use crate::usize;

/// Represents the combination of a starting address and
/// a total capacity of the returned block.
#[unstable(feature = "allocator_api", issue = "32838")]
#[derive(Debug)]
pub struct Excess(pub NonNull<u8>, pub usize);

const fn size_align<T>() -> (usize, usize) {
(mem::size_of::<T>(), mem::align_of::<T>())
}
@@ -593,13 +587,12 @@ pub unsafe trait GlobalAlloc {
///
/// * the starting address for that memory block was previously
/// returned by a previous call to an allocation method (`alloc`,
/// `alloc_zeroed`, `alloc_excess`) or reallocation method
/// (`realloc`, `realloc_excess`), and
/// `alloc_zeroed`) or reallocation method (`realloc`), and
///
/// * the memory block has not been subsequently deallocated, where
/// blocks are deallocated either by being passed to a deallocation
/// method (`dealloc`, `dealloc_one`, `dealloc_array`) or by being
/// passed to a reallocation method (see above) that returns `Ok`.
/// method (`dealloc`) or by being passed to a reallocation method
/// (see above) that returns `Ok`.
///
/// A note regarding zero-sized types and zero-sized layouts: many
/// methods in the `AllocRef` trait state that allocation requests
@@ -625,11 +618,9 @@ pub unsafe trait GlobalAlloc {
///
/// 2. The block's size must fall in the range `[use_min, use_max]`, where:
///
/// * `use_min` is `self.usable_size(layout).0`, and
/// * `use_min` is `layout.size()`, and
///
/// * `use_max` is the capacity that was (or would have been)
/// returned when (if) the block was allocated via a call to
/// `alloc_excess` or `realloc_excess`.
/// * `use_max` is the capacity that was returned.
///
/// Note that:
///
@@ -643,6 +634,9 @@ pub unsafe trait GlobalAlloc {
/// currently allocated via an allocator `a`, then it is legal to
/// use that layout to deallocate it, i.e., `a.dealloc(ptr, k);`.
///
/// * if an allocator does not support overallocating, it is fine to
/// simply return `layout.size()` as the allocated size.
///
/// # Safety
///
/// The `AllocRef` trait is an `unsafe` trait for a number of reasons, and
@@ -671,8 +665,9 @@ pub unsafe trait AllocRef {
// However in jemalloc for example,
// `mallocx(0)` is documented as undefined behavior.)

/// Returns a pointer meeting the size and alignment guarantees of
/// `layout`.
/// On success, returns a pointer meeting the size and alignment
/// guarantees of `layout` and the actual size of the allocated block,
/// which must be greater than or equal to `layout.size()`.
///
/// If this method returns an `Ok(addr)`, then the `addr` returned
/// will be non-null address pointing to a block of storage
@@ -709,7 +704,7 @@ pub unsafe trait AllocRef {
/// rather than directly invoking `panic!` or similar.
///
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr>;
unsafe fn alloc(&mut self, layout: Layout) -> Result<(NonNull<u8>, usize), AllocErr>;

/// Deallocate the memory referenced by `ptr`.
///
@@ -728,38 +723,31 @@ pub unsafe trait AllocRef {
/// to allocate that block of memory.
unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout);

// == ALLOCATOR-SPECIFIC QUANTITIES AND LIMITS ==
// usable_size

/// Returns bounds on the guaranteed usable size of a successful
/// allocation created with the specified `layout`.
/// Behaves like `alloc`, but also ensures that the contents
/// are set to zero before being returned.
///
/// In particular, if one has a memory block allocated via a given
/// allocator `a` and layout `k` where `a.usable_size(k)` returns
/// `(l, u)`, then one can pass that block to `a.dealloc()` with a
/// layout in the size range [l, u].
/// # Safety
///
/// (All implementors of `usable_size` must ensure that
/// `l <= k.size() <= u`)
/// This function is unsafe for the same reasons that `alloc` is.
///
/// Both the lower- and upper-bounds (`l` and `u` respectively)
/// are provided, because an allocator based on size classes could
/// misbehave if one attempts to deallocate a block without
/// providing a correct value for its size (i.e., one within the
/// range `[l, u]`).
/// # Errors
///
/// Clients who wish to make use of excess capacity are encouraged
/// to use the `alloc_excess` and `realloc_excess` instead, as
/// this method is constrained to report conservative values that
/// serve as valid bounds for *all possible* allocation method
/// calls.
/// Returning `Err` indicates that either memory is exhausted or
/// `layout` does not meet allocator's size or alignment
/// constraints, just as in `alloc`.
///
/// However, for clients that do not wish to track the capacity
/// returned by `alloc_excess` locally, this method is likely to
/// produce useful results.
#[inline]
fn usable_size(&self, layout: &Layout) -> (usize, usize) {
(layout.size(), layout.size())
/// Clients wishing to abort computation in response to an
/// allocation error are encouraged to call the [`handle_alloc_error`] function,
/// rather than directly invoking `panic!` or similar.
///
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<(NonNull<u8>, usize), AllocErr> {
let size = layout.size();
let result = self.alloc(layout);
if let Ok((p, _)) = result {
ptr::write_bytes(p.as_ptr(), 0, size);
}
result
}

// == METHODS FOR MEMORY REUSE ==
@@ -767,9 +755,10 @@ pub unsafe trait AllocRef {

/// Returns a pointer suitable for holding data described by
/// a new layout with `layout`’s alignment and a size given
/// by `new_size`. To
/// accomplish this, this may extend or shrink the allocation
/// referenced by `ptr` to fit the new layout.
/// by `new_size` and the actual size of the allocated block.
/// The latter is greater than or equal to `layout.size()`.
/// To accomplish this, the allocator may extend or shrink
/// the allocation referenced by `ptr` to fit the new layout.
///
/// If this returns `Ok`, then ownership of the memory block
/// referenced by `ptr` has been transferred to this
@@ -824,23 +813,25 @@ pub unsafe trait AllocRef {
ptr: NonNull<u8>,
layout: Layout,
new_size: usize,
) -> Result<NonNull<u8>, AllocErr> {
) -> Result<(NonNull<u8>, usize), AllocErr> {
let old_size = layout.size();

if new_size >= old_size {
if let Ok(()) = self.grow_in_place(ptr, layout, new_size) {
return Ok(ptr);
if new_size > old_size {
if let Ok(size) = self.grow_in_place(ptr, layout, new_size) {
return Ok((ptr, size));
}
} else if new_size < old_size {
if let Ok(()) = self.shrink_in_place(ptr, layout, new_size) {
return Ok(ptr);
if let Ok(size) = self.shrink_in_place(ptr, layout, new_size) {
return Ok((ptr, size));
}
} else {
return Ok((ptr, new_size));
}

// otherwise, fall back on alloc + copy + dealloc.
let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
let result = self.alloc(new_layout);
if let Ok(new_ptr) = result {
if let Ok((new_ptr, _)) = result {
ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), cmp::min(old_size, new_size));
self.dealloc(ptr, layout);
}
@@ -877,174 +868,40 @@ pub unsafe trait AllocRef {
ptr: NonNull<u8>,
layout: Layout,
new_size: usize,
) -> Result<NonNull<u8>, AllocErr> {
) -> Result<(NonNull<u8>, usize), AllocErr> {
let old_size = layout.size();

if new_size >= old_size {
if let Ok(()) = self.grow_in_place_zeroed(ptr, layout, new_size) {
return Ok(ptr);
if new_size > old_size {
if let Ok(size) = self.grow_in_place_zeroed(ptr, layout, new_size) {
return Ok((ptr, size));
}
} else if new_size < old_size {
if let Ok(()) = self.shrink_in_place(ptr, layout, new_size) {
return Ok(ptr);
if let Ok(size) = self.shrink_in_place(ptr, layout, new_size) {
return Ok((ptr, size));
}
} else {
return Ok((ptr, new_size));
}

// otherwise, fall back on alloc + copy + dealloc.
let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
let result = self.alloc_zeroed(new_layout);
if let Ok(new_ptr) = result {
if let Ok((new_ptr, _)) = result {
ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), cmp::min(old_size, new_size));
self.dealloc(ptr, layout);
}
result
}

/// Behaves like `alloc`, but also ensures that the contents
/// are set to zero before being returned.
///
/// # Safety
///
/// This function is unsafe for the same reasons that `alloc` is.
///
/// # Errors
///
/// Returning `Err` indicates that either memory is exhausted or
/// `layout` does not meet allocator's size or alignment
/// constraints, just as in `alloc`.
///
/// Clients wishing to abort computation in response to an
/// allocation error are encouraged to call the [`handle_alloc_error`] function,
/// rather than directly invoking `panic!` or similar.
///
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
let size = layout.size();
let p = self.alloc(layout);
if let Ok(p) = p {
ptr::write_bytes(p.as_ptr(), 0, size);
}
p
}

/// Behaves like `alloc`, but also returns the whole size of
/// the returned block. For some `layout` inputs, like arrays, this
/// may include extra storage usable for additional data.
///
/// # Safety
///
/// This function is unsafe for the same reasons that `alloc` is.
///
/// # Errors
///
/// Returning `Err` indicates that either memory is exhausted or
/// `layout` does not meet allocator's size or alignment
/// constraints, just as in `alloc`.
///
/// Clients wishing to abort computation in response to an
/// allocation error are encouraged to call the [`handle_alloc_error`] function,
/// rather than directly invoking `panic!` or similar.
///
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
let usable_size = self.usable_size(&layout);
self.alloc(layout).map(|p| Excess(p, usable_size.1))
}

/// Behaves like `alloc`, but also returns the whole size of
/// the returned block. For some `layout` inputs, like arrays, this
/// may include extra storage usable for additional data.
/// Also it ensures that the contents are set to zero before being returned.
///
/// # Safety
///
/// This function is unsafe for the same reasons that `alloc` is.
///
/// # Errors
///
/// Returning `Err` indicates that either memory is exhausted or
/// `layout` does not meet allocator's size or alignment
/// constraints, just as in `alloc`.
///
/// Clients wishing to abort computation in response to an
/// allocation error are encouraged to call the [`handle_alloc_error`] function,
/// rather than directly invoking `panic!` or similar.
///
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
unsafe fn alloc_excess_zeroed(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
let usable_size = self.usable_size(&layout);
self.alloc_zeroed(layout).map(|p| Excess(p, usable_size.1))
}

/// Behaves like `realloc`, but also returns the whole size of
/// the returned block. For some `layout` inputs, like arrays, this
/// may include extra storage usable for additional data.
///
/// # Safety
///
/// This function is unsafe for the same reasons that `realloc` is.
///
/// # Errors
///
/// Returning `Err` indicates that either memory is exhausted or
/// `layout` does not meet allocator's size or alignment
/// constraints, just as in `realloc`.
///
/// Clients wishing to abort computation in response to a
/// reallocation error are encouraged to call the [`handle_alloc_error`] function,
/// rather than directly invoking `panic!` or similar.
///
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
unsafe fn realloc_excess(
&mut self,
ptr: NonNull<u8>,
layout: Layout,
new_size: usize,
) -> Result<Excess, AllocErr> {
let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
let usable_size = self.usable_size(&new_layout);
self.realloc(ptr, layout, new_size).map(|p| Excess(p, usable_size.1))
}

/// Behaves like `realloc`, but also returns the whole size of
/// the returned block. For some `layout` inputs, like arrays, this
/// may include extra storage usable for additional data.
/// Also it ensures that the contents are set to zero before being returned.
///
/// # Safety
///
/// This function is unsafe for the same reasons that `realloc` is.
///
/// # Errors
///
/// Returning `Err` indicates that either memory is exhausted or
/// `layout` does not meet allocator's size or alignment
/// constraints, just as in `realloc`.
///
/// Clients wishing to abort computation in response to a
/// reallocation error are encouraged to call the [`handle_alloc_error`] function,
/// rather than directly invoking `panic!` or similar.
///
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
unsafe fn realloc_excess_zeroed(
&mut self,
ptr: NonNull<u8>,
layout: Layout,
new_size: usize,
) -> Result<Excess, AllocErr> {
let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
let usable_size = self.usable_size(&new_layout);
self.realloc_zeroed(ptr, layout, new_size).map(|p| Excess(p, usable_size.1))
}

/// Attempts to extend the allocation referenced by `ptr` to fit `new_size`.
///
/// If this returns `Ok`, then the allocator has asserted that the
/// memory block referenced by `ptr` now fits `new_size`, and thus can
/// be used to carry data of a layout of that size and same alignment as
/// `layout`. (The allocator is allowed to
/// expend effort to accomplish this, such as extending the memory block to
/// include successor blocks, or virtual memory tricks.)
/// `layout`. The returned value is the new size of the allocated block.
/// (The allocator is allowed to expend effort to accomplish this, such
/// as extending the memory block to include successor blocks, or virtual
/// memory tricks.)
///
/// Regardless of what this method returns, ownership of the
/// memory block referenced by `ptr` has not been transferred, and
@@ -1072,18 +929,17 @@ pub unsafe trait AllocRef {
/// function; clients are expected either to be able to recover from
/// `grow_in_place` failures without aborting, or to fall back on
/// another reallocation method before resorting to an abort.
#[inline]
unsafe fn grow_in_place(
&mut self,
ptr: NonNull<u8>,
layout: Layout,
new_size: usize,
) -> Result<(), CannotReallocInPlace> {
let _ = ptr; // this default implementation doesn't care about the actual address.
debug_assert!(new_size >= layout.size());
let (_l, u) = self.usable_size(&layout);
// _l <= layout.size() [guaranteed by usable_size()]
// layout.size() <= new_layout.size() [required by this method]
if new_size <= u { Ok(()) } else { Err(CannotReallocInPlace) }
) -> Result<usize, CannotReallocInPlace> {
let _ = ptr;
let _ = layout;
let _ = new_size;
Err(CannotReallocInPlace)
}

/// Behaves like `grow_in_place`, but also ensures that the new
@@ -1108,10 +964,10 @@ pub unsafe trait AllocRef {
ptr: NonNull<u8>,
layout: Layout,
new_size: usize,
) -> Result<(), CannotReallocInPlace> {
self.grow_in_place(ptr, layout, new_size)?;
) -> Result<usize, CannotReallocInPlace> {
let size = self.grow_in_place(ptr, layout, new_size)?;
ptr.as_ptr().add(layout.size()).write_bytes(0, new_size - layout.size());
Ok(())
Ok(size)
}

/// Attempts to shrink the allocation referenced by `ptr` to fit `new_size`.
@@ -1119,7 +975,8 @@ pub unsafe trait AllocRef {
/// If this returns `Ok`, then the allocator has asserted that the
/// memory block referenced by `ptr` now fits `new_size`, and
/// thus can only be used to carry data of that smaller
/// layout. (The allocator is allowed to take advantage of this,
/// layout. The returned value is the new size the allocated block.
/// (The allocator is allowed to take advantage of this,
/// carving off portions of the block for reuse elsewhere.) The
/// truncated contents of the block within the smaller layout are
/// unaltered, and ownership of block has not been transferred.
@@ -1153,17 +1010,16 @@ pub unsafe trait AllocRef {
/// function; clients are expected either to be able to recover from
/// `shrink_in_place` failures without aborting, or to fall back
/// on another reallocation method before resorting to an abort.
#[inline]
unsafe fn shrink_in_place(
&mut self,
ptr: NonNull<u8>,
layout: Layout,
new_size: usize,
) -> Result<(), CannotReallocInPlace> {
let _ = ptr; // this default implementation doesn't care about the actual address.
debug_assert!(new_size <= layout.size());
let (l, _u) = self.usable_size(&layout);
// layout.size() <= _u [guaranteed by usable_size()]
// new_layout.size() <= layout.size() [required by this method]
if l <= new_size { Ok(()) } else { Err(CannotReallocInPlace) }
) -> Result<usize, CannotReallocInPlace> {
let _ = ptr;
let _ = layout;
let _ = new_size;
Err(CannotReallocInPlace)
}
}
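Taken together, these hunks reshape the default `realloc`/`realloc_zeroed` so every branch reports the pointer plus the resulting size. A standalone sketch of that branch structure with the in-place paths stubbed out (the free functions here are illustrative stand-ins, not the trait's methods):

```rust
// Illustrative stand-ins for the in-place paths; a real allocator would try to
// extend or trim the existing block and report the resulting size.
fn grow_in_place(_old: usize, _new: usize) -> Result<usize, ()> {
    Err(()) // like the new default: never succeed, always fall back
}
fn shrink_in_place(_old: usize, new: usize) -> Result<usize, ()> {
    Ok(new)
}

// Mirrors the branch structure of the default `realloc` in the hunks above.
fn realloc_size(old_size: usize, new_size: usize) -> usize {
    if new_size > old_size {
        if let Ok(size) = grow_in_place(old_size, new_size) {
            return size;
        }
    } else if new_size < old_size {
        if let Ok(size) = shrink_in_place(old_size, new_size) {
            return size;
        }
    } else {
        return new_size; // equal sizes: nothing to do
    }
    // Fall back on "alloc + copy + dealloc"; the fresh block is `new_size` bytes.
    new_size
}

fn main() {
    assert_eq!(realloc_size(64, 64), 64);
    assert_eq!(realloc_size(64, 32), 32);
    assert_eq!(realloc_size(64, 128), 128);
}
```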
@@ -43,7 +43,7 @@
//! are elements, and once they've all been exhausted, will return `None` to
//! indicate that iteration is finished. Individual iterators may choose to
//! resume iteration, and so calling [`next`] again may or may not eventually
//! start returning `Some(Item)` again at some point.
//! start returning `Some(Item)` again at some point (for example, see [`TryIter`]).
//!
//! [`Iterator`]'s full definition includes a number of other methods as well,
//! but they are default methods, built on top of [`next`], and so you get
@@ -56,6 +56,7 @@
//! [`Iterator`]: trait.Iterator.html
//! [`next`]: trait.Iterator.html#tymethod.next
//! [`Option`]: ../../std/option/enum.Option.html
//! [`TryIter`]: ../../std/sync/mpsc/struct.TryIter.html
//!
//! # The three forms of iteration
//!
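The doc change points to [`TryIter`] as an iterator that can yield `Some` again after having returned `None`. A small demonstration with the real `std::sync::mpsc` API:

```rust
use std::sync::mpsc::channel;

fn main() {
    let (tx, rx) = channel();
    tx.send(1).unwrap();

    let mut iter = rx.try_iter();
    assert_eq!(iter.next(), Some(1));
    assert_eq!(iter.next(), None); // nothing queued right now

    // After more data arrives, the same iterator yields `Some` again.
    tx.send(2).unwrap();
    assert_eq!(iter.next(), Some(2));
}
```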
@@ -69,8 +69,10 @@
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait ExactSizeIterator: Iterator {
/// Returns the exact number of times the iterator will iterate.
/// Returns the exact length of the iterator.
///
/// The implementation ensures that the iterator will return exactly `len()`
/// more times a `Some(T)` value, before returning `None`.
/// This method has a default implementation, so you usually should not
/// implement it directly. However, if you can provide a more efficient
/// implementation, you can do so. See the [trait-level] docs for an
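The reworded `ExactSizeIterator` docs say `len` reports exactly how many more `Some` items the iterator will yield. A quick illustration with a `Range`, which implements the trait in std:

```rust
fn main() {
    let mut iter = 0..5;
    assert_eq!(iter.len(), 5); // exact remaining length
    iter.next();
    assert_eq!(iter.len(), 4); // decreases as items are consumed
    assert_eq!(iter.by_ref().count(), 4);
    assert_eq!(iter.len(), 0);
}
```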
@@ -234,7 +234,7 @@ impl BoundNamesCollector {
start = false;
write!(fmt, "{}", r)?;
}
for (_, t) in &self.types {
for t in self.types.values() {
if !start {
write!(fmt, ", ")?;
}
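Several hunks in this rollup replace `for (_, v) in &map` with `map.values()`. The same before/after on a plain `BTreeMap` (the compiler's own map types differ, but expose the same `values` adapter):

```rust
use std::collections::BTreeMap;

fn main() {
    let mut types = BTreeMap::new();
    types.insert("a", 1);
    types.insert("b", 2);

    // Before: destructure the pair and throw the key away.
    let mut sum = 0;
    for (_, t) in &types {
        sum += t;
    }

    // After: iterate the values directly; same result, clearer intent.
    let sum2: i32 = types.values().sum();
    assert_eq!(sum, sum2);
}
```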
@@ -1382,10 +1382,8 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {

// Write down the order of our locals that will be promoted to the prefix.
{
let mut idx = 0u32;
for local in ineligible_locals.iter() {
assignments[local] = Ineligible(Some(idx));
idx += 1;
for (idx, local) in ineligible_locals.iter().enumerate() {
assignments[local] = Ineligible(Some(idx as u32));
}
}
debug!("generator saved local assignments: {:?}", assignments);
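The generator-layout hunk trades a hand-maintained `u32` counter for `enumerate`, casting the index where a `u32` is needed. The same transformation in a self-contained form:

```rust
fn main() {
    let locals = ["a", "b", "c"];

    // Before: a manually incremented u32 index.
    let mut idx = 0u32;
    let mut manual = Vec::new();
    for local in locals.iter() {
        manual.push((idx, *local));
        idx += 1;
    }

    // After: `enumerate` supplies the index, cast to u32 at the use site.
    let with_enumerate: Vec<(u32, &str)> =
        locals.iter().enumerate().map(|(idx, local)| (idx as u32, *local)).collect();

    assert_eq!(manual, with_enumerate);
}
```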
@@ -188,7 +188,7 @@ fn parse_args<'a>(
let mut err = ecx
.struct_span_err(e.span, "positional arguments cannot follow named arguments");
err.span_label(e.span, "positional arguments must be before named arguments");
for (_, pos) in &names {
for pos in names.values() {
err.span_label(args[*pos].span, "named argument");
}
err.emit();
@@ -395,6 +395,7 @@ E0714: include_str!("./error_codes/E0714.md"),
E0715: include_str!("./error_codes/E0715.md"),
E0716: include_str!("./error_codes/E0716.md"),
E0718: include_str!("./error_codes/E0718.md"),
E0719: include_str!("./error_codes/E0719.md"),
E0720: include_str!("./error_codes/E0720.md"),
E0723: include_str!("./error_codes/E0723.md"),
E0725: include_str!("./error_codes/E0725.md"),
@@ -605,7 +606,6 @@ E0748: include_str!("./error_codes/E0748.md"),
E0710, // an unknown tool name found in scoped lint
E0711, // a feature has been declared with conflicting stability attributes
E0717, // rustc_promotable without stability attribute
E0719, // duplicate values for associated type binding
// E0721, // `await` keyword
E0722, // Malformed `#[optimize]` attribute
E0724, // `#[ffi_returns_twice]` is only allowed in foreign functions
@@ -1,10 +1,28 @@
The `DispatchFromDyn` trait was implemented on something which is not a pointer
or a newtype wrapper around a pointer.

Erroneous code example:

```compile-fail,E0378
#![feature(dispatch_from_dyn)]
use std::ops::DispatchFromDyn;

struct WrapperExtraField<T> {
ptr: T,
extra_stuff: i32,
}

impl<T, U> DispatchFromDyn<WrapperExtraField<U>> for WrapperExtraField<T>
where
T: DispatchFromDyn<U>,
{}
```

The `DispatchFromDyn` trait currently can only be implemented for
builtin pointer types and structs that are newtype wrappers around them
— that is, the struct must have only one field (except for `PhantomData`),
and that field must itself implement `DispatchFromDyn`.

Examples:

```
#![feature(dispatch_from_dyn, unsize)]
use std::{
@@ -20,6 +38,8 @@ where
{}
```

Another example:

```
#![feature(dispatch_from_dyn)]
use std::{
@@ -37,21 +57,3 @@ where
T: DispatchFromDyn<U>,
{}
```

Example of illegal `DispatchFromDyn` implementation
(illegal because of extra field)

```compile-fail,E0378
#![feature(dispatch_from_dyn)]
use std::ops::DispatchFromDyn;

struct WrapperExtraField<T> {
ptr: T,
extra_stuff: i32,
}

impl<T, U> DispatchFromDyn<WrapperExtraField<U>> for WrapperExtraField<T>
where
T: DispatchFromDyn<U>,
{}
```
src/librustc_error_codes/error_codes/E0719.md (new file, 35 lines)
@@ -0,0 +1,35 @@
The value for an associated type has already been specified.

Erroneous code example:

```compile_fail,E0719
#![feature(associated_type_bounds)]

trait FooTrait {}
trait BarTrait {}

// error: associated type `Item` in trait `Iterator` is specified twice
struct Foo<T: Iterator<Item: FooTrait, Item: BarTrait>> { f: T }
```

`Item` in trait `Iterator` cannot be specified multiple times for struct `Foo`.
To fix this, create a new trait that is a combination of the desired traits and
specify the associated type with the new trait.

Corrected example:

```
#![feature(associated_type_bounds)]

trait FooTrait {}
trait BarTrait {}
trait FooBarTrait: FooTrait + BarTrait {}

struct Foo<T: Iterator<Item: FooBarTrait>> { f: T }
```

For more information about associated types, see [the book][bk-at]. For more
information on associated type bounds, see [RFC 2289][rfc-2289].

[bk-at]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#specifying-placeholder-types-in-trait-definitions-with-associated-types
[rfc-2289]: https://rust-lang.github.io/rfcs/2289-associated-type-bounds.html
@@ -136,12 +136,11 @@ impl<'a> DiagnosticBuilder<'a> {

let handler = self.0.handler;

// We need to use `ptr::read` because `DiagnosticBuilder` implements `Drop`.
let diagnostic;
unsafe {
diagnostic = std::ptr::read(&self.0.diagnostic);
std::mem::forget(self);
};
// We must use `Level::Cancelled` for `dummy` to avoid an ICE about an
// unused diagnostic.
let dummy = Diagnostic::new(Level::Cancelled, "");
let diagnostic = std::mem::replace(&mut self.0.diagnostic, dummy);

// Logging here is useful to help track down where in logs an error was
// actually emitted.
debug!("buffer: diagnostic={:?}", diagnostic);
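The leak fix above replaces `ptr::read` plus `mem::forget` with `mem::replace` and a cheap placeholder, so the wrapper's `Drop` impl still runs and nothing is leaked. A minimal sketch of that ownership-extraction pattern with stand-in types (not rustc's):

```rust
struct Wrapper {
    payload: String,
}

impl Drop for Wrapper {
    fn drop(&mut self) {
        // Still runs after extraction, but only sees the cheap placeholder.
        println!("dropping wrapper, payload now: {:?}", self.payload);
    }
}

fn take_payload(mut w: Wrapper) -> String {
    // Safe alternative to `ptr::read` + `mem::forget`: move the value out and
    // leave a dummy in its place, letting `Drop` run normally.
    std::mem::replace(&mut w.payload, String::new())
}

fn main() {
    let w = Wrapper { payload: String::from("diagnostic") };
    let payload = take_payload(w);
    assert_eq!(payload, "diagnostic");
}
```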
@@ -1574,9 +1574,9 @@ impl EmitterWriter {

let line_start = sm.lookup_char_pos(parts[0].span.lo()).line;
draw_col_separator_no_space(&mut buffer, 1, max_line_num_len + 1);
let mut line_pos = 0;
let mut lines = complete.lines();
for line in lines.by_ref().take(MAX_SUGGESTION_HIGHLIGHT_LINES) {
for (line_pos, line) in lines.by_ref().take(MAX_SUGGESTION_HIGHLIGHT_LINES).enumerate()
{
// Print the span column to avoid confusion
buffer.puts(
row_num,
@@ -1587,7 +1587,6 @@ impl EmitterWriter {
// print the suggestion
draw_col_separator(&mut buffer, row_num, max_line_num_len + 1);
buffer.append(row_num, line, Style::NoStyle);
line_pos += 1;
row_num += 1;
}
@@ -679,15 +679,15 @@ impl Crate<'_> {
where
V: itemlikevisit::ItemLikeVisitor<'hir>,
{
for (_, item) in &self.items {
for item in self.items.values() {
visitor.visit_item(item);
}

for (_, trait_item) in &self.trait_items {
for trait_item in self.trait_items.values() {
visitor.visit_trait_item(trait_item);
}

for (_, impl_item) in &self.impl_items {
for impl_item in self.impl_items.values() {
visitor.visit_impl_item(impl_item);
}
}
@@ -751,7 +751,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
let dummy_source = graph.add_node(());
let dummy_sink = graph.add_node(());

for (constraint, _) in &self.data.constraints {
for constraint in self.data.constraints.keys() {
match *constraint {
Constraint::VarSubVar(a_id, b_id) => {
graph.add_edge(
@@ -34,7 +34,7 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
assert!(self.in_snapshot());

// Go through each placeholder that we created.
for (_, &placeholder_region) in placeholder_map {
for &placeholder_region in placeholder_map.values() {
// Find the universe this placeholder inhabits.
let placeholder = match placeholder_region {
ty::RePlaceholder(p) => p,
@@ -1252,7 +1252,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
// this may resolve to either a value or a type, but for documentation
// purposes it's good enough to just favor one over the other.
self.r.per_ns(|this, ns| {
if let Some(binding) = source_bindings[ns].get().ok() {
if let Ok(binding) = source_bindings[ns].get() {
this.import_res_map.entry(directive.id).or_default()[ns] = Some(binding.res());
}
});
@@ -1293,7 +1293,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
let mut redundant_span = PerNS { value_ns: None, type_ns: None, macro_ns: None };

self.r.per_ns(|this, ns| {
if let Some(binding) = source_bindings[ns].get().ok() {
if let Ok(binding) = source_bindings[ns].get() {
if binding.res() == Res::Err {
return;
}
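These resolver hunks match the rollup item "Don't convert Results to Options just for matching." (#69637): match the `Result` directly instead of going through `.ok()`. The pattern in isolation:

```rust
fn parse(input: &str) -> Result<i32, std::num::ParseIntError> {
    input.parse()
}

fn main() {
    // Before: convert the Result to an Option just to match on it.
    if let Some(n) = parse("42").ok() {
        assert_eq!(n, 42);
    }

    // After: match the Result directly.
    if let Ok(n) = parse("42") {
        assert_eq!(n, 42);
    }
}
```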
@@ -347,7 +347,7 @@ fn krate(tcx: TyCtxt<'_>) -> NamedRegionMap {
lifetime_uses: &mut Default::default(),
missing_named_lifetime_spots: vec![],
};
for (_, item) in &krate.items {
for item in krate.items.values() {
visitor.visit_item(item);
}
}
@@ -1652,7 +1652,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
}

for (projection_bound, _) in &bounds.projection_bounds {
for (_, def_ids) in &mut associated_types {
for def_ids in associated_types.values_mut() {
def_ids.remove(&projection_bound.projection_def_id());
}
}
@@ -526,7 +526,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// we may want to suggest removing a `&`.
if !sm.span_to_filename(expr.span).is_real() {
if let Ok(code) = sm.span_to_snippet(sp) {
if code.chars().next() == Some('&') {
if code.starts_with('&') {
return Some((
sp,
"consider removing the borrow",
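This hunk asks the question directly with `str::starts_with` instead of building a `chars()` iterator to inspect the first character. In isolation:

```rust
fn main() {
    let code = "&foo";

    // Before: build a char iterator just to look at the first character.
    assert!(code.chars().next() == Some('&'));

    // After: ask the question directly.
    assert!(code.starts_with('&'));
}
```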
@@ -137,13 +137,15 @@ pub struct System;
#[unstable(feature = "allocator_api", issue = "32838")]
unsafe impl AllocRef for System {
#[inline]
unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
NonNull::new(GlobalAlloc::alloc(self, layout)).ok_or(AllocErr)
unsafe fn alloc(&mut self, layout: Layout) -> Result<(NonNull<u8>, usize), AllocErr> {
NonNull::new(GlobalAlloc::alloc(self, layout)).ok_or(AllocErr).map(|p| (p, layout.size()))
}

#[inline]
unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
NonNull::new(GlobalAlloc::alloc_zeroed(self, layout)).ok_or(AllocErr)
unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<(NonNull<u8>, usize), AllocErr> {
NonNull::new(GlobalAlloc::alloc_zeroed(self, layout))
.ok_or(AllocErr)
.map(|p| (p, layout.size()))
}

#[inline]
@@ -157,8 +159,10 @@ unsafe impl AllocRef for System {
ptr: NonNull<u8>,
layout: Layout,
new_size: usize,
) -> Result<NonNull<u8>, AllocErr> {
NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)
) -> Result<(NonNull<u8>, usize), AllocErr> {
NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size))
.ok_or(AllocErr)
.map(|p| (p, new_size))
}
}
@@ -901,7 +901,7 @@ impl ToSocketAddrs for str {
type Iter = vec::IntoIter<SocketAddr>;
fn to_socket_addrs(&self) -> io::Result<vec::IntoIter<SocketAddr>> {
// try to parse as a regular SocketAddr first
if let Some(addr) = self.parse().ok() {
if let Ok(addr) = self.parse() {
return Ok(vec![addr].into_iter());
}
@@ -280,7 +280,7 @@ impl Socket {
};
let mut timeout = libc::timeval {
tv_sec: secs,
tv_usec: (dur.subsec_nanos() / 1000) as libc::suseconds_t,
tv_usec: dur.subsec_micros() as libc::suseconds_t,
};
if timeout.tv_sec == 0 && timeout.tv_usec == 0 {
timeout.tv_usec = 1;
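The socket-timeout hunk swaps the manual nanosecond division for `Duration::subsec_micros`, which computes the same value:

```rust
use std::time::Duration;

fn main() {
    let dur = Duration::new(2, 1_500_000); // 2 s plus 1.5 ms

    // The two expressions from the hunk agree; `subsec_micros` states the
    // intent directly instead of dividing nanoseconds by 1000.
    assert_eq!(dur.subsec_nanos() / 1000, dur.subsec_micros());
    assert_eq!(dur.subsec_micros(), 1_500);
}
```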
@@ -37,7 +37,7 @@ fn main() {
unsafe {
let layout = Layout::from_size_align(4, 2).unwrap();

let ptr = Global.alloc(layout.clone()).unwrap();
let (ptr, _) = Global.alloc(layout.clone()).unwrap();
helper::work_with(&ptr);
assert_eq!(HITS.load(Ordering::SeqCst), n + 1);
Global.dealloc(ptr, layout.clone());
@@ -49,7 +49,7 @@ fn main() {
drop(s);
assert_eq!(HITS.load(Ordering::SeqCst), n + 4);

let ptr = System.alloc(layout.clone()).unwrap();
let (ptr, _) = System.alloc(layout.clone()).unwrap();
assert_eq!(HITS.load(Ordering::SeqCst), n + 4);
helper::work_with(&ptr);
System.dealloc(ptr, layout);
@@ -20,13 +20,13 @@ fn main() {
let n = GLOBAL.0.load(Ordering::SeqCst);
let layout = Layout::from_size_align(4, 2).unwrap();

let ptr = Global.alloc(layout.clone()).unwrap();
let (ptr, _) = Global.alloc(layout.clone()).unwrap();
helper::work_with(&ptr);
assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 1);
Global.dealloc(ptr, layout.clone());
assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2);

let ptr = System.alloc(layout.clone()).unwrap();
let (ptr, _) = System.alloc(layout.clone()).unwrap();
assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2);
helper::work_with(&ptr);
System.dealloc(ptr, layout);
@@ -728,3 +728,4 @@ LL | type TADyn3 = dyn Iterator<Item: 'static, Item: 'static>;

error: aborting due to 96 previous errors

For more information about this error, try `rustc --explain E0719`.
@@ -16,3 +16,4 @@ LL | fn test() -> Box<dyn Iterator<Item = (), Item = Unit>> {

error: aborting due to 2 previous errors

For more information about this error, try `rustc --explain E0719`.
@@ -41,13 +41,13 @@ unsafe fn test_triangle() -> bool {
println!("allocate({:?})", layout);
}

let ret = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
let (ptr, _) = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));

if PRINT {
println!("allocate({:?}) = {:?}", layout, ret);
println!("allocate({:?}) = {:?}", layout, ptr);
}

ret.cast().as_ptr()
ptr.cast().as_ptr()
}

unsafe fn deallocate(ptr: *mut u8, layout: Layout) {
@@ -63,16 +63,16 @@ unsafe fn test_triangle() -> bool {
println!("reallocate({:?}, old={:?}, new={:?})", ptr, old, new);
}

let ret = Global.realloc(NonNull::new_unchecked(ptr), old, new.size())
let (ptr, _) = Global.realloc(NonNull::new_unchecked(ptr), old, new.size())
.unwrap_or_else(|_| handle_alloc_error(
Layout::from_size_align_unchecked(new.size(), old.align())
));

if PRINT {
println!("reallocate({:?}, old={:?}, new={:?}) = {:?}",
ptr, old, new, ret);
ptr, old, new, ptr);
}
ret.cast().as_ptr()
ptr.cast().as_ptr()
}

fn idx_to_size(i: usize) -> usize { (i+1) * 10 }
@@ -24,29 +24,29 @@ struct Ccx {
x: isize
}

fn alloc<'a>(_bcx : &'a arena) -> &'a Bcx<'a> {
fn alloc(_bcx: &arena) -> &Bcx<'_> {
unsafe {
let layout = Layout::new::<Bcx>();
let ptr = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
let (ptr, _) = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
&*(ptr.as_ptr() as *const _)
}
}

fn h<'a>(bcx : &'a Bcx<'a>) -> &'a Bcx<'a> {
fn h<'a>(bcx: &'a Bcx<'a>) -> &'a Bcx<'a> {
return alloc(bcx.fcx.arena);
}

fn g(fcx : &Fcx) {
let bcx = Bcx { fcx: fcx };
fn g(fcx: &Fcx) {
let bcx = Bcx { fcx };
let bcx2 = h(&bcx);
unsafe {
Global.dealloc(NonNull::new_unchecked(bcx2 as *const _ as *mut _), Layout::new::<Bcx>());
}
}

fn f(ccx : &Ccx) {
fn f(ccx: &Ccx) {
let a = arena(());
let fcx = Fcx { arena: &a, ccx: ccx };
let fcx = Fcx { arena: &a, ccx };
return g(&fcx);
}
|
Loading…
Add table
Add a link
Reference in a new issue