// ignore-tidy-filelength //! Slice management and manipulation. //! //! For more details see [`std::slice`]. //! //! [`std::slice`]: ../../std/slice/index.html #![stable(feature = "rust1", since = "1.0.0")] // How this module is organized. // // The library infrastructure for slices is fairly messy. There's // a lot of stuff defined here. Let's keep it clean. // // The layout of this file is thus: // // * Inherent methods. This is where most of the slice API resides. // * Implementations of a few common traits with important slice ops. // * Definitions of a bunch of iterators. // * Free functions. // * The `raw` and `bytes` submodules. // * Boilerplate trait implementations. use crate::cmp; use crate::cmp::Ordering::{self, Equal, Greater, Less}; use crate::fmt; use crate::intrinsics::{assume, exact_div, is_aligned_and_not_null, unchecked_sub}; use crate::iter::*; use crate::marker::{self, Copy, Send, Sized, Sync}; use crate::mem; use crate::ops::{self, Bound, FnMut, Range, RangeBounds}; use crate::option::Option; use crate::option::Option::{None, Some}; use crate::ptr::{self, NonNull}; use crate::result::Result; use crate::result::Result::{Err, Ok}; #[unstable( feature = "slice_internals", issue = "none", reason = "exposed from core to be reused in std; use the memchr crate" )] /// Pure rust memchr implementation, taken from rust-memchr pub mod memchr; mod rotate; mod sort; // // Extension traits // #[lang = "slice"] #[cfg(not(test))] impl [T] { /// Returns the number of elements in the slice. /// /// # Examples /// /// ``` /// let a = [1, 2, 3]; /// assert_eq!(a.len(), 3); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_slice_len", since = "1.32.0")] #[inline] // SAFETY: const sound because we transmute out the length field as a usize (which it must be) #[allow(unused_attributes)] #[allow_internal_unstable(const_fn_union)] pub const fn len(&self) -> usize { // SAFETY: this is safe because `&[T]` and `FatPtr` have the same layout. // Only `std` can make this guarantee. unsafe { crate::ptr::Repr { rust: self }.raw.len } } /// Returns `true` if the slice has a length of 0. /// /// # Examples /// /// ``` /// let a = [1, 2, 3]; /// assert!(!a.is_empty()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_slice_is_empty", since = "1.32.0")] #[inline] pub const fn is_empty(&self) -> bool { self.len() == 0 } /// Returns the first element of the slice, or `None` if it is empty. /// /// # Examples /// /// ``` /// let v = [10, 40, 30]; /// assert_eq!(Some(&10), v.first()); /// /// let w: &[i32] = &[]; /// assert_eq!(None, w.first()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn first(&self) -> Option<&T> { if let [first, ..] = self { Some(first) } else { None } } /// Returns a mutable pointer to the first element of the slice, or `None` if it is empty. /// /// # Examples /// /// ``` /// let x = &mut [0, 1, 2]; /// /// if let Some(first) = x.first_mut() { /// *first = 5; /// } /// assert_eq!(x, &[5, 1, 2]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn first_mut(&mut self) -> Option<&mut T> { if let [first, ..] = self { Some(first) } else { None } } /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty. /// /// # Examples /// /// ``` /// let x = &[0, 1, 2]; /// /// if let Some((first, elements)) = x.split_first() { /// assert_eq!(first, &0); /// assert_eq!(elements, &[1, 2]); /// } /// ``` #[stable(feature = "slice_splits", since = "1.5.0")] #[inline] pub fn split_first(&self) -> Option<(&T, &[T])> { if let [first, tail @ ..] = self { Some((first, tail)) } else { None } } /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty. /// /// # Examples /// /// ``` /// let x = &mut [0, 1, 2]; /// /// if let Some((first, elements)) = x.split_first_mut() { /// *first = 3; /// elements[0] = 4; /// elements[1] = 5; /// } /// assert_eq!(x, &[3, 4, 5]); /// ``` #[stable(feature = "slice_splits", since = "1.5.0")] #[inline] pub fn split_first_mut(&mut self) -> Option<(&mut T, &mut [T])> { if let [first, tail @ ..] = self { Some((first, tail)) } else { None } } /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty. /// /// # Examples /// /// ``` /// let x = &[0, 1, 2]; /// /// if let Some((last, elements)) = x.split_last() { /// assert_eq!(last, &2); /// assert_eq!(elements, &[0, 1]); /// } /// ``` #[stable(feature = "slice_splits", since = "1.5.0")] #[inline] pub fn split_last(&self) -> Option<(&T, &[T])> { if let [init @ .., last] = self { Some((last, init)) } else { None } } /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty. /// /// # Examples /// /// ``` /// let x = &mut [0, 1, 2]; /// /// if let Some((last, elements)) = x.split_last_mut() { /// *last = 3; /// elements[0] = 4; /// elements[1] = 5; /// } /// assert_eq!(x, &[4, 5, 3]); /// ``` #[stable(feature = "slice_splits", since = "1.5.0")] #[inline] pub fn split_last_mut(&mut self) -> Option<(&mut T, &mut [T])> { if let [init @ .., last] = self { Some((last, init)) } else { None } } /// Returns the last element of the slice, or `None` if it is empty. /// /// # Examples /// /// ``` /// let v = [10, 40, 30]; /// assert_eq!(Some(&30), v.last()); /// /// let w: &[i32] = &[]; /// assert_eq!(None, w.last()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn last(&self) -> Option<&T> { if let [.., last] = self { Some(last) } else { None } } /// Returns a mutable pointer to the last item in the slice. /// /// # Examples /// /// ``` /// let x = &mut [0, 1, 2]; /// /// if let Some(last) = x.last_mut() { /// *last = 10; /// } /// assert_eq!(x, &[0, 1, 10]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn last_mut(&mut self) -> Option<&mut T> { if let [.., last] = self { Some(last) } else { None } } /// Returns a reference to an element or subslice depending on the type of /// index. /// /// - If given a position, returns a reference to the element at that /// position or `None` if out of bounds. /// - If given a range, returns the subslice corresponding to that range, /// or `None` if out of bounds. /// /// # Examples /// /// ``` /// let v = [10, 40, 30]; /// assert_eq!(Some(&40), v.get(1)); /// assert_eq!(Some(&[10, 40][..]), v.get(0..2)); /// assert_eq!(None, v.get(3)); /// assert_eq!(None, v.get(0..4)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn get(&self, index: I) -> Option<&I::Output> where I: SliceIndex, { index.get(self) } /// Returns a mutable reference to an element or subslice depending on the /// type of index (see [`get`]) or `None` if the index is out of bounds. /// /// [`get`]: #method.get /// /// # Examples /// /// ``` /// let x = &mut [0, 1, 2]; /// /// if let Some(elem) = x.get_mut(1) { /// *elem = 42; /// } /// assert_eq!(x, &[0, 42, 2]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn get_mut(&mut self, index: I) -> Option<&mut I::Output> where I: SliceIndex, { index.get_mut(self) } /// Returns a reference to an element or subslice, without doing bounds /// checking. /// /// For a safe alternative see [`get`]. /// /// # Safety /// /// Calling this method with an out-of-bounds index is *[undefined behavior]* /// even if the resulting reference is not used. /// /// [`get`]: #method.get /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html /// /// # Examples /// /// ``` /// let x = &[1, 2, 4]; /// /// unsafe { /// assert_eq!(x.get_unchecked(1), &2); /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub unsafe fn get_unchecked(&self, index: I) -> &I::Output where I: SliceIndex, { // SAFETY: the caller must uphold most of the safety requirements for `get_unchecked`; // the slice is dereferencable because `self` is a safe reference. // The returned pointer is safe because impls of `SliceIndex` have to guarantee that it is. unsafe { &*index.get_unchecked(self) } } /// Returns a mutable reference to an element or subslice, without doing /// bounds checking. /// /// For a safe alternative see [`get_mut`]. /// /// # Safety /// /// Calling this method with an out-of-bounds index is *[undefined behavior]* /// even if the resulting reference is not used. /// /// [`get_mut`]: #method.get_mut /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html /// /// # Examples /// /// ``` /// let x = &mut [1, 2, 4]; /// /// unsafe { /// let elem = x.get_unchecked_mut(1); /// *elem = 13; /// } /// assert_eq!(x, &[1, 13, 4]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub unsafe fn get_unchecked_mut(&mut self, index: I) -> &mut I::Output where I: SliceIndex, { // SAFETY: the caller must uphold the safety requirements for `get_unchecked_mut`; // the slice is dereferencable because `self` is a safe reference. // The returned pointer is safe because impls of `SliceIndex` have to guarantee that it is. unsafe { &mut *index.get_unchecked_mut(self) } } /// Converts a range over this slice to [`Range`]. /// /// The returned range is safe to pass to [`get_unchecked`] and [`get_unchecked_mut`]. /// /// [`get_unchecked`]: #method.get_unchecked /// [`get_unchecked_mut`]: #method.get_unchecked_mut /// /// # Panics /// /// Panics if the range is out of bounds. /// /// # Examples /// /// ``` /// #![feature(slice_check_range)] /// /// let v = [10, 40, 30]; /// assert_eq!(1..2, v.check_range(1..2)); /// assert_eq!(0..2, v.check_range(..2)); /// assert_eq!(1..3, v.check_range(1..)); /// ``` /// /// Panics when [`Index::index`] would panic: /// /// ```should_panic /// #![feature(slice_check_range)] /// /// [10, 40, 30].check_range(2..1); /// ``` /// /// ```should_panic /// #![feature(slice_check_range)] /// /// [10, 40, 30].check_range(1..4); /// ``` /// /// ```should_panic /// #![feature(slice_check_range)] /// /// [10, 40, 30].check_range(1..=usize::MAX); /// ``` /// /// [`Index::index`]: ops::Index::index #[track_caller] #[unstable(feature = "slice_check_range", issue = "none")] pub fn check_range>(&self, range: R) -> Range { let start = match range.start_bound() { Bound::Included(&start) => start, Bound::Excluded(start) => { start.checked_add(1).unwrap_or_else(|| slice_start_index_overflow_fail()) } Bound::Unbounded => 0, }; let len = self.len(); let end = match range.end_bound() { Bound::Included(end) => { end.checked_add(1).unwrap_or_else(|| slice_end_index_overflow_fail()) } Bound::Excluded(&end) => end, Bound::Unbounded => len, }; if start > end { slice_index_order_fail(start, end); } if end > len { slice_end_index_len_fail(end, len); } Range { start, end } } /// Returns a raw pointer to the slice's buffer. /// /// The caller must ensure that the slice outlives the pointer this /// function returns, or else it will end up pointing to garbage. /// /// The caller must also ensure that the memory the pointer (non-transitively) points to /// is never written to (except inside an `UnsafeCell`) using this pointer or any pointer /// derived from it. If you need to mutate the contents of the slice, use [`as_mut_ptr`]. /// /// Modifying the container referenced by this slice may cause its buffer /// to be reallocated, which would also make any pointers to it invalid. /// /// # Examples /// /// ``` /// let x = &[1, 2, 4]; /// let x_ptr = x.as_ptr(); /// /// unsafe { /// for i in 0..x.len() { /// assert_eq!(x.get_unchecked(i), &*x_ptr.add(i)); /// } /// } /// ``` /// /// [`as_mut_ptr`]: #method.as_mut_ptr #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_slice_as_ptr", since = "1.32.0")] #[inline] pub const fn as_ptr(&self) -> *const T { self as *const [T] as *const T } /// Returns an unsafe mutable pointer to the slice's buffer. /// /// The caller must ensure that the slice outlives the pointer this /// function returns, or else it will end up pointing to garbage. /// /// Modifying the container referenced by this slice may cause its buffer /// to be reallocated, which would also make any pointers to it invalid. /// /// # Examples /// /// ``` /// let x = &mut [1, 2, 4]; /// let x_ptr = x.as_mut_ptr(); /// /// unsafe { /// for i in 0..x.len() { /// *x_ptr.add(i) += 2; /// } /// } /// assert_eq!(x, &[3, 4, 6]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn as_mut_ptr(&mut self) -> *mut T { self as *mut [T] as *mut T } /// Returns the two raw pointers spanning the slice. /// /// The returned range is half-open, which means that the end pointer /// points *one past* the last element of the slice. This way, an empty /// slice is represented by two equal pointers, and the difference between /// the two pointers represents the size of the slice. /// /// See [`as_ptr`] for warnings on using these pointers. The end pointer /// requires extra caution, as it does not point to a valid element in the /// slice. /// /// This function is useful for interacting with foreign interfaces which /// use two pointers to refer to a range of elements in memory, as is /// common in C++. /// /// It can also be useful to check if a pointer to an element refers to an /// element of this slice: /// /// ``` /// #![feature(slice_ptr_range)] /// /// let a = [1, 2, 3]; /// let x = &a[1] as *const _; /// let y = &5 as *const _; /// /// assert!(a.as_ptr_range().contains(&x)); /// assert!(!a.as_ptr_range().contains(&y)); /// ``` /// /// [`as_ptr`]: #method.as_ptr #[unstable(feature = "slice_ptr_range", issue = "65807")] #[inline] pub fn as_ptr_range(&self) -> Range<*const T> { let start = self.as_ptr(); // SAFETY: The `add` here is safe, because: // // - Both pointers are part of the same object, as pointing directly // past the object also counts. // // - The size of the slice is never larger than isize::MAX bytes, as // noted here: // - https://github.com/rust-lang/unsafe-code-guidelines/issues/102#issuecomment-473340447 // - https://doc.rust-lang.org/reference/behavior-considered-undefined.html // - https://doc.rust-lang.org/core/slice/fn.from_raw_parts.html#safety // (This doesn't seem normative yet, but the very same assumption is // made in many places, including the Index implementation of slices.) // // - There is no wrapping around involved, as slices do not wrap past // the end of the address space. // // See the documentation of pointer::add. let end = unsafe { start.add(self.len()) }; start..end } /// Returns the two unsafe mutable pointers spanning the slice. /// /// The returned range is half-open, which means that the end pointer /// points *one past* the last element of the slice. This way, an empty /// slice is represented by two equal pointers, and the difference between /// the two pointers represents the size of the slice. /// /// See [`as_mut_ptr`] for warnings on using these pointers. The end /// pointer requires extra caution, as it does not point to a valid element /// in the slice. /// /// This function is useful for interacting with foreign interfaces which /// use two pointers to refer to a range of elements in memory, as is /// common in C++. /// /// [`as_mut_ptr`]: #method.as_mut_ptr #[unstable(feature = "slice_ptr_range", issue = "65807")] #[inline] pub fn as_mut_ptr_range(&mut self) -> Range<*mut T> { let start = self.as_mut_ptr(); // SAFETY: See as_ptr_range() above for why `add` here is safe. let end = unsafe { start.add(self.len()) }; start..end } /// Swaps two elements in the slice. /// /// # Arguments /// /// * a - The index of the first element /// * b - The index of the second element /// /// # Panics /// /// Panics if `a` or `b` are out of bounds. /// /// # Examples /// /// ``` /// let mut v = ["a", "b", "c", "d"]; /// v.swap(1, 3); /// assert!(v == ["a", "d", "c", "b"]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn swap(&mut self, a: usize, b: usize) { // Can't take two mutable loans from one vector, so instead just cast // them to their raw pointers to do the swap. let pa: *mut T = &mut self[a]; let pb: *mut T = &mut self[b]; // SAFETY: `pa` and `pb` have been created from safe mutable references and refer // to elements in the slice and therefore are guaranteed to be valid and aligned. // Note that accessing the elements behind `a` and `b` is checked and will // panic when out of bounds. unsafe { ptr::swap(pa, pb); } } /// Reverses the order of elements in the slice, in place. /// /// # Examples /// /// ``` /// let mut v = [1, 2, 3]; /// v.reverse(); /// assert!(v == [3, 2, 1]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn reverse(&mut self) { let mut i: usize = 0; let ln = self.len(); // For very small types, all the individual reads in the normal // path perform poorly. We can do better, given efficient unaligned // load/store, by loading a larger chunk and reversing a register. // Ideally LLVM would do this for us, as it knows better than we do // whether unaligned reads are efficient (since that changes between // different ARM versions, for example) and what the best chunk size // would be. Unfortunately, as of LLVM 4.0 (2017-05) it only unrolls // the loop, so we need to do this ourselves. (Hypothesis: reverse // is troublesome because the sides can be aligned differently -- // will be, when the length is odd -- so there's no way of emitting // pre- and postludes to use fully-aligned SIMD in the middle.) let fast_unaligned = cfg!(any(target_arch = "x86", target_arch = "x86_64")); if fast_unaligned && mem::size_of::() == 1 { // Use the llvm.bswap intrinsic to reverse u8s in a usize let chunk = mem::size_of::(); while i + chunk - 1 < ln / 2 { // SAFETY: There are several things to check here: // // - Note that `chunk` is either 4 or 8 due to the cfg check // above. So `chunk - 1` is positive. // - Indexing with index `i` is fine as the loop check guarantees // `i + chunk - 1 < ln / 2` // <=> `i < ln / 2 - (chunk - 1) < ln / 2 < ln`. // - Indexing with index `ln - i - chunk = ln - (i + chunk)` is fine: // - `i + chunk > 0` is trivially true. // - The loop check guarantees: // `i + chunk - 1 < ln / 2` // <=> `i + chunk ≤ ln / 2 ≤ ln`, thus subtraction does not underflow. // - The `read_unaligned` and `write_unaligned` calls are fine: // - `pa` points to index `i` where `i < ln / 2 - (chunk - 1)` // (see above) and `pb` points to index `ln - i - chunk`, so // both are at least `chunk` // many bytes away from the end of `self`. // - Any initialized memory is valid `usize`. unsafe { let pa: *mut T = self.get_unchecked_mut(i); let pb: *mut T = self.get_unchecked_mut(ln - i - chunk); let va = ptr::read_unaligned(pa as *mut usize); let vb = ptr::read_unaligned(pb as *mut usize); ptr::write_unaligned(pa as *mut usize, vb.swap_bytes()); ptr::write_unaligned(pb as *mut usize, va.swap_bytes()); } i += chunk; } } if fast_unaligned && mem::size_of::() == 2 { // Use rotate-by-16 to reverse u16s in a u32 let chunk = mem::size_of::() / 2; while i + chunk - 1 < ln / 2 { // SAFETY: An unaligned u32 can be read from `i` if `i + 1 < ln` // (and obviously `i < ln`), because each element is 2 bytes and // we're reading 4. // // `i + chunk - 1 < ln / 2` # while condition // `i + 2 - 1 < ln / 2` // `i + 1 < ln / 2` // // Since it's less than the length divided by 2, then it must be // in bounds. // // This also means that the condition `0 < i + chunk <= ln` is // always respected, ensuring the `pb` pointer can be used // safely. unsafe { let pa: *mut T = self.get_unchecked_mut(i); let pb: *mut T = self.get_unchecked_mut(ln - i - chunk); let va = ptr::read_unaligned(pa as *mut u32); let vb = ptr::read_unaligned(pb as *mut u32); ptr::write_unaligned(pa as *mut u32, vb.rotate_left(16)); ptr::write_unaligned(pb as *mut u32, va.rotate_left(16)); } i += chunk; } } while i < ln / 2 { // SAFETY: `i` is inferior to half the length of the slice so // accessing `i` and `ln - i - 1` is safe (`i` starts at 0 and // will not go further than `ln / 2 - 1`). // The resulting pointers `pa` and `pb` are therefore valid and // aligned, and can be read from and written to. unsafe { // Unsafe swap to avoid the bounds check in safe swap. let pa: *mut T = self.get_unchecked_mut(i); let pb: *mut T = self.get_unchecked_mut(ln - i - 1); ptr::swap(pa, pb); } i += 1; } } /// Returns an iterator over the slice. /// /// # Examples /// /// ``` /// let x = &[1, 2, 4]; /// let mut iterator = x.iter(); /// /// assert_eq!(iterator.next(), Some(&1)); /// assert_eq!(iterator.next(), Some(&2)); /// assert_eq!(iterator.next(), Some(&4)); /// assert_eq!(iterator.next(), None); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn iter(&self) -> Iter<'_, T> { let ptr = self.as_ptr(); // SAFETY: There are several things here: // // `ptr` has been obtained by `self.as_ptr()` where `self` is a valid // reference thus it is non-NUL and safe to use and pass to // `NonNull::new_unchecked` . // // Adding `self.len()` to the starting pointer gives a pointer // at the end of `self`. `end` will never be dereferenced, only checked // for direct pointer equality with `ptr` to check if the iterator is // done. // // In the case of a ZST, the end pointer is just the start pointer plus // the length, to also allows for the fast `ptr == end` check. // // See the `next_unchecked!` and `is_empty!` macros as well as the // `post_inc_start` method for more informations. unsafe { assume(!ptr.is_null()); let end = if mem::size_of::() == 0 { (ptr as *const u8).wrapping_add(self.len()) as *const T } else { ptr.add(self.len()) }; Iter { ptr: NonNull::new_unchecked(ptr as *mut T), end, _marker: marker::PhantomData } } } /// Returns an iterator that allows modifying each value. /// /// # Examples /// /// ``` /// let x = &mut [1, 2, 4]; /// for elem in x.iter_mut() { /// *elem += 2; /// } /// assert_eq!(x, &[3, 4, 6]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn iter_mut(&mut self) -> IterMut<'_, T> { let ptr = self.as_mut_ptr(); // SAFETY: There are several things here: // // `ptr` has been obtained by `self.as_ptr()` where `self` is a valid // reference thus it is non-NUL and safe to use and pass to // `NonNull::new_unchecked` . // // Adding `self.len()` to the starting pointer gives a pointer // at the end of `self`. `end` will never be dereferenced, only checked // for direct pointer equality with `ptr` to check if the iterator is // done. // // In the case of a ZST, the end pointer is just the start pointer plus // the length, to also allows for the fast `ptr == end` check. // // See the `next_unchecked!` and `is_empty!` macros as well as the // `post_inc_start` method for more informations. unsafe { assume(!ptr.is_null()); let end = if mem::size_of::() == 0 { (ptr as *mut u8).wrapping_add(self.len()) as *mut T } else { ptr.add(self.len()) }; IterMut { ptr: NonNull::new_unchecked(ptr), end, _marker: marker::PhantomData } } } /// Returns an iterator over all contiguous windows of length /// `size`. The windows overlap. If the slice is shorter than /// `size`, the iterator returns no values. /// /// # Panics /// /// Panics if `size` is 0. /// /// # Examples /// /// ``` /// let slice = ['r', 'u', 's', 't']; /// let mut iter = slice.windows(2); /// assert_eq!(iter.next().unwrap(), &['r', 'u']); /// assert_eq!(iter.next().unwrap(), &['u', 's']); /// assert_eq!(iter.next().unwrap(), &['s', 't']); /// assert!(iter.next().is_none()); /// ``` /// /// If the slice is shorter than `size`: /// /// ``` /// let slice = ['f', 'o', 'o']; /// let mut iter = slice.windows(4); /// assert!(iter.next().is_none()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn windows(&self, size: usize) -> Windows<'_, T> { assert_ne!(size, 0); Windows { v: self, size } } /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the /// beginning of the slice. /// /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the /// slice, then the last chunk will not have length `chunk_size`. /// /// See [`chunks_exact`] for a variant of this iterator that returns chunks of always exactly /// `chunk_size` elements, and [`rchunks`] for the same iterator but starting at the end of the /// slice. /// /// # Panics /// /// Panics if `chunk_size` is 0. /// /// # Examples /// /// ``` /// let slice = ['l', 'o', 'r', 'e', 'm']; /// let mut iter = slice.chunks(2); /// assert_eq!(iter.next().unwrap(), &['l', 'o']); /// assert_eq!(iter.next().unwrap(), &['r', 'e']); /// assert_eq!(iter.next().unwrap(), &['m']); /// assert!(iter.next().is_none()); /// ``` /// /// [`chunks_exact`]: #method.chunks_exact /// [`rchunks`]: #method.rchunks #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn chunks(&self, chunk_size: usize) -> Chunks<'_, T> { assert_ne!(chunk_size, 0); Chunks { v: self, chunk_size } } /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the /// beginning of the slice. /// /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the /// length of the slice, then the last chunk will not have length `chunk_size`. /// /// See [`chunks_exact_mut`] for a variant of this iterator that returns chunks of always /// exactly `chunk_size` elements, and [`rchunks_mut`] for the same iterator but starting at /// the end of the slice. /// /// # Panics /// /// Panics if `chunk_size` is 0. /// /// # Examples /// /// ``` /// let v = &mut [0, 0, 0, 0, 0]; /// let mut count = 1; /// /// for chunk in v.chunks_mut(2) { /// for elem in chunk.iter_mut() { /// *elem += count; /// } /// count += 1; /// } /// assert_eq!(v, &[1, 1, 2, 2, 3]); /// ``` /// /// [`chunks_exact_mut`]: #method.chunks_exact_mut /// [`rchunks_mut`]: #method.rchunks_mut #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<'_, T> { assert_ne!(chunk_size, 0); ChunksMut { v: self, chunk_size } } /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the /// beginning of the slice. /// /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the /// slice, then the last up to `chunk_size-1` elements will be omitted and can be retrieved /// from the `remainder` function of the iterator. /// /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the /// resulting code better than in the case of [`chunks`]. /// /// See [`chunks`] for a variant of this iterator that also returns the remainder as a smaller /// chunk, and [`rchunks_exact`] for the same iterator but starting at the end of the slice. /// /// # Panics /// /// Panics if `chunk_size` is 0. /// /// # Examples /// /// ``` /// let slice = ['l', 'o', 'r', 'e', 'm']; /// let mut iter = slice.chunks_exact(2); /// assert_eq!(iter.next().unwrap(), &['l', 'o']); /// assert_eq!(iter.next().unwrap(), &['r', 'e']); /// assert!(iter.next().is_none()); /// assert_eq!(iter.remainder(), &['m']); /// ``` /// /// [`chunks`]: #method.chunks /// [`rchunks_exact`]: #method.rchunks_exact #[stable(feature = "chunks_exact", since = "1.31.0")] #[inline] pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<'_, T> { assert_ne!(chunk_size, 0); let rem = self.len() % chunk_size; let fst_len = self.len() - rem; // SAFETY: 0 <= fst_len <= self.len() by construction above let (fst, snd) = unsafe { self.split_at_unchecked(fst_len) }; ChunksExact { v: fst, rem: snd, chunk_size } } /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the /// beginning of the slice. /// /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the /// length of the slice, then the last up to `chunk_size-1` elements will be omitted and can be /// retrieved from the `into_remainder` function of the iterator. /// /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the /// resulting code better than in the case of [`chunks_mut`]. /// /// See [`chunks_mut`] for a variant of this iterator that also returns the remainder as a /// smaller chunk, and [`rchunks_exact_mut`] for the same iterator but starting at the end of /// the slice. /// /// # Panics /// /// Panics if `chunk_size` is 0. /// /// # Examples /// /// ``` /// let v = &mut [0, 0, 0, 0, 0]; /// let mut count = 1; /// /// for chunk in v.chunks_exact_mut(2) { /// for elem in chunk.iter_mut() { /// *elem += count; /// } /// count += 1; /// } /// assert_eq!(v, &[1, 1, 2, 2, 0]); /// ``` /// /// [`chunks_mut`]: #method.chunks_mut /// [`rchunks_exact_mut`]: #method.rchunks_exact_mut #[stable(feature = "chunks_exact", since = "1.31.0")] #[inline] pub fn chunks_exact_mut(&mut self, chunk_size: usize) -> ChunksExactMut<'_, T> { assert_ne!(chunk_size, 0); let rem = self.len() % chunk_size; let fst_len = self.len() - rem; // SAFETY: 0 <= fst_len <= self.len() by construction above let (fst, snd) = unsafe { self.split_at_mut_unchecked(fst_len) }; ChunksExactMut { v: fst, rem: snd, chunk_size } } /// Returns an iterator over `N` elements of the slice at a time, starting at the /// beginning of the slice. /// /// The chunks are array references and do not overlap. If `N` does not divide the /// length of the slice, then the last up to `N-1` elements will be omitted and can be /// retrieved from the `remainder` function of the iterator. /// /// This method is the const generic equivalent of [`chunks_exact`]. /// /// # Panics /// /// Panics if `N` is 0. This check will most probably get changed to a compile time /// error before this method gets stabilized. /// /// # Examples /// /// ``` /// #![feature(array_chunks)] /// let slice = ['l', 'o', 'r', 'e', 'm']; /// let mut iter = slice.array_chunks(); /// assert_eq!(iter.next().unwrap(), &['l', 'o']); /// assert_eq!(iter.next().unwrap(), &['r', 'e']); /// assert!(iter.next().is_none()); /// assert_eq!(iter.remainder(), &['m']); /// ``` /// /// [`chunks_exact`]: #method.chunks_exact #[unstable(feature = "array_chunks", issue = "74985")] #[inline] pub fn array_chunks(&self) -> ArrayChunks<'_, T, N> { assert_ne!(N, 0); let len = self.len() / N; let (fst, snd) = self.split_at(len * N); // SAFETY: We cast a slice of `len * N` elements into // a slice of `len` many `N` elements chunks. let array_slice: &[[T; N]] = unsafe { from_raw_parts(fst.as_ptr().cast(), len) }; ArrayChunks { iter: array_slice.iter(), rem: snd } } /// Returns an iterator over `N` elements of the slice at a time, starting at the /// beginning of the slice. /// /// The chunks are mutable array references and do not overlap. If `N` does not divide /// the length of the slice, then the last up to `N-1` elements will be omitted and /// can be retrieved from the `into_remainder` function of the iterator. /// /// This method is the const generic equivalent of [`chunks_exact_mut`]. /// /// # Panics /// /// Panics if `N` is 0. This check will most probably get changed to a compile time /// error before this method gets stabilized. /// /// # Examples /// /// ``` /// #![feature(array_chunks)] /// let v = &mut [0, 0, 0, 0, 0]; /// let mut count = 1; /// /// for chunk in v.array_chunks_mut() { /// *chunk = [count; 2]; /// count += 1; /// } /// assert_eq!(v, &[1, 1, 2, 2, 0]); /// ``` /// /// [`chunks_exact_mut`]: #method.chunks_exact_mut #[unstable(feature = "array_chunks", issue = "74985")] #[inline] pub fn array_chunks_mut(&mut self) -> ArrayChunksMut<'_, T, N> { assert_ne!(N, 0); let len = self.len() / N; let (fst_ptr, snd) = { // Scope the first slice into a pointer to avoid aliasing the new slice below. let (fst, snd) = self.split_at_mut(len * N); (fst.as_mut_ptr(), snd) }; // SAFETY: We cast a slice of `len * N` elements into // a slice of `len` many `N` elements chunks. let array_slice: &mut [[T; N]] = unsafe { from_raw_parts_mut(fst_ptr.cast(), len) }; ArrayChunksMut { iter: array_slice.iter_mut(), rem: snd } } /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end /// of the slice. /// /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the /// slice, then the last chunk will not have length `chunk_size`. /// /// See [`rchunks_exact`] for a variant of this iterator that returns chunks of always exactly /// `chunk_size` elements, and [`chunks`] for the same iterator but starting at the beginning /// of the slice. /// /// # Panics /// /// Panics if `chunk_size` is 0. /// /// # Examples /// /// ``` /// let slice = ['l', 'o', 'r', 'e', 'm']; /// let mut iter = slice.rchunks(2); /// assert_eq!(iter.next().unwrap(), &['e', 'm']); /// assert_eq!(iter.next().unwrap(), &['o', 'r']); /// assert_eq!(iter.next().unwrap(), &['l']); /// assert!(iter.next().is_none()); /// ``` /// /// [`rchunks_exact`]: #method.rchunks_exact /// [`chunks`]: #method.chunks #[stable(feature = "rchunks", since = "1.31.0")] #[inline] pub fn rchunks(&self, chunk_size: usize) -> RChunks<'_, T> { assert!(chunk_size != 0); RChunks { v: self, chunk_size } } /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end /// of the slice. /// /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the /// length of the slice, then the last chunk will not have length `chunk_size`. /// /// See [`rchunks_exact_mut`] for a variant of this iterator that returns chunks of always /// exactly `chunk_size` elements, and [`chunks_mut`] for the same iterator but starting at the /// beginning of the slice. /// /// # Panics /// /// Panics if `chunk_size` is 0. /// /// # Examples /// /// ``` /// let v = &mut [0, 0, 0, 0, 0]; /// let mut count = 1; /// /// for chunk in v.rchunks_mut(2) { /// for elem in chunk.iter_mut() { /// *elem += count; /// } /// count += 1; /// } /// assert_eq!(v, &[3, 2, 2, 1, 1]); /// ``` /// /// [`rchunks_exact_mut`]: #method.rchunks_exact_mut /// [`chunks_mut`]: #method.chunks_mut #[stable(feature = "rchunks", since = "1.31.0")] #[inline] pub fn rchunks_mut(&mut self, chunk_size: usize) -> RChunksMut<'_, T> { assert!(chunk_size != 0); RChunksMut { v: self, chunk_size } } /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the /// end of the slice. /// /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the /// slice, then the last up to `chunk_size-1` elements will be omitted and can be retrieved /// from the `remainder` function of the iterator. /// /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the /// resulting code better than in the case of [`chunks`]. /// /// See [`rchunks`] for a variant of this iterator that also returns the remainder as a smaller /// chunk, and [`chunks_exact`] for the same iterator but starting at the beginning of the /// slice. /// /// # Panics /// /// Panics if `chunk_size` is 0. /// /// # Examples /// /// ``` /// let slice = ['l', 'o', 'r', 'e', 'm']; /// let mut iter = slice.rchunks_exact(2); /// assert_eq!(iter.next().unwrap(), &['e', 'm']); /// assert_eq!(iter.next().unwrap(), &['o', 'r']); /// assert!(iter.next().is_none()); /// assert_eq!(iter.remainder(), &['l']); /// ``` /// /// [`chunks`]: #method.chunks /// [`rchunks`]: #method.rchunks /// [`chunks_exact`]: #method.chunks_exact #[stable(feature = "rchunks", since = "1.31.0")] #[inline] pub fn rchunks_exact(&self, chunk_size: usize) -> RChunksExact<'_, T> { assert!(chunk_size != 0); let rem = self.len() % chunk_size; // SAFETY: 0 <= rem <= self.len() by construction above let (fst, snd) = unsafe { self.split_at_unchecked(rem) }; RChunksExact { v: snd, rem: fst, chunk_size } } /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end /// of the slice. /// /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the /// length of the slice, then the last up to `chunk_size-1` elements will be omitted and can be /// retrieved from the `into_remainder` function of the iterator. /// /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the /// resulting code better than in the case of [`chunks_mut`]. /// /// See [`rchunks_mut`] for a variant of this iterator that also returns the remainder as a /// smaller chunk, and [`chunks_exact_mut`] for the same iterator but starting at the beginning /// of the slice. /// /// # Panics /// /// Panics if `chunk_size` is 0. /// /// # Examples /// /// ``` /// let v = &mut [0, 0, 0, 0, 0]; /// let mut count = 1; /// /// for chunk in v.rchunks_exact_mut(2) { /// for elem in chunk.iter_mut() { /// *elem += count; /// } /// count += 1; /// } /// assert_eq!(v, &[0, 2, 2, 1, 1]); /// ``` /// /// [`chunks_mut`]: #method.chunks_mut /// [`rchunks_mut`]: #method.rchunks_mut /// [`chunks_exact_mut`]: #method.chunks_exact_mut #[stable(feature = "rchunks", since = "1.31.0")] #[inline] pub fn rchunks_exact_mut(&mut self, chunk_size: usize) -> RChunksExactMut<'_, T> { assert!(chunk_size != 0); let rem = self.len() % chunk_size; // SAFETY: 0 <= rem <= self.len() by construction above let (fst, snd) = unsafe { self.split_at_mut_unchecked(rem) }; RChunksExactMut { v: snd, rem: fst, chunk_size } } /// Divides one slice into two at an index. /// /// The first will contain all indices from `[0, mid)` (excluding /// the index `mid` itself) and the second will contain all /// indices from `[mid, len)` (excluding the index `len` itself). /// /// # Panics /// /// Panics if `mid > len`. /// /// # Examples /// /// ``` /// let v = [1, 2, 3, 4, 5, 6]; /// /// { /// let (left, right) = v.split_at(0); /// assert_eq!(left, []); /// assert_eq!(right, [1, 2, 3, 4, 5, 6]); /// } /// /// { /// let (left, right) = v.split_at(2); /// assert_eq!(left, [1, 2]); /// assert_eq!(right, [3, 4, 5, 6]); /// } /// /// { /// let (left, right) = v.split_at(6); /// assert_eq!(left, [1, 2, 3, 4, 5, 6]); /// assert_eq!(right, []); /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn split_at(&self, mid: usize) -> (&[T], &[T]) { assert!(mid <= self.len()); // SAFETY: `[ptr; mid]` and `[mid; len]` are inside `self`, which // fulfills the requirements of `from_raw_parts_mut`. unsafe { self.split_at_unchecked(mid) } } /// Divides one mutable slice into two at an index. /// /// The first will contain all indices from `[0, mid)` (excluding /// the index `mid` itself) and the second will contain all /// indices from `[mid, len)` (excluding the index `len` itself). /// /// # Panics /// /// Panics if `mid > len`. /// /// # Examples /// /// ``` /// let mut v = [1, 0, 3, 0, 5, 6]; /// // scoped to restrict the lifetime of the borrows /// { /// let (left, right) = v.split_at_mut(2); /// assert_eq!(left, [1, 0]); /// assert_eq!(right, [3, 0, 5, 6]); /// left[1] = 2; /// right[1] = 4; /// } /// assert_eq!(v, [1, 2, 3, 4, 5, 6]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) { assert!(mid <= self.len()); // SAFETY: `[ptr; mid]` and `[mid; len]` are inside `self`, which // fulfills the requirements of `from_raw_parts_mut`. unsafe { self.split_at_mut_unchecked(mid) } } /// Divides one slice into two at an index, without doing bounds checking. /// /// The first will contain all indices from `[0, mid)` (excluding /// the index `mid` itself) and the second will contain all /// indices from `[mid, len)` (excluding the index `len` itself). /// /// For a safe alternative see [`split_at`]. /// /// # Safety /// /// Calling this method with an out-of-bounds index is *[undefined behavior]* /// even if the resulting reference is not used. The caller has to ensure that /// `0 <= mid <= self.len()`. /// /// [`split_at`]: #method.split_at /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html /// /// # Examples /// /// ```compile_fail /// #![feature(slice_split_at_unchecked)] /// /// let v = [1, 2, 3, 4, 5, 6]; /// /// unsafe { /// let (left, right) = v.split_at_unchecked(0); /// assert_eq!(left, []); /// assert_eq!(right, [1, 2, 3, 4, 5, 6]); /// } /// /// unsafe { /// let (left, right) = v.split_at_unchecked(2); /// assert_eq!(left, [1, 2]); /// assert_eq!(right, [3, 4, 5, 6]); /// } /// /// unsafe { /// let (left, right) = v.split_at_unchecked(6); /// assert_eq!(left, [1, 2, 3, 4, 5, 6]); /// assert_eq!(right, []); /// } /// ``` #[unstable(feature = "slice_split_at_unchecked", reason = "new API", issue = "76014")] #[inline] unsafe fn split_at_unchecked(&self, mid: usize) -> (&[T], &[T]) { // SAFETY: Caller has to check that `0 <= mid <= self.len()` unsafe { (self.get_unchecked(..mid), self.get_unchecked(mid..)) } } /// Divides one mutable slice into two at an index, without doing bounds checking. /// /// The first will contain all indices from `[0, mid)` (excluding /// the index `mid` itself) and the second will contain all /// indices from `[mid, len)` (excluding the index `len` itself). /// /// For a safe alternative see [`split_at_mut`]. /// /// # Safety /// /// Calling this method with an out-of-bounds index is *[undefined behavior]* /// even if the resulting reference is not used. The caller has to ensure that /// `0 <= mid <= self.len()`. /// /// [`split_at_mut`]: #method.split_at_mut /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html /// /// # Examples /// /// ```compile_fail /// #![feature(slice_split_at_unchecked)] /// /// let mut v = [1, 0, 3, 0, 5, 6]; /// // scoped to restrict the lifetime of the borrows /// unsafe { /// let (left, right) = v.split_at_mut_unchecked(2); /// assert_eq!(left, [1, 0]); /// assert_eq!(right, [3, 0, 5, 6]); /// left[1] = 2; /// right[1] = 4; /// } /// assert_eq!(v, [1, 2, 3, 4, 5, 6]); /// ``` #[unstable(feature = "slice_split_at_unchecked", reason = "new API", issue = "76014")] #[inline] unsafe fn split_at_mut_unchecked(&mut self, mid: usize) -> (&mut [T], &mut [T]) { let len = self.len(); let ptr = self.as_mut_ptr(); // SAFETY: Caller has to check that `0 <= mid <= self.len()`. // // `[ptr; mid]` and `[mid; len]` are not overlapping, so returning a mutable reference // is fine. unsafe { (from_raw_parts_mut(ptr, mid), from_raw_parts_mut(ptr.add(mid), len - mid)) } } /// Returns an iterator over subslices separated by elements that match /// `pred`. The matched element is not contained in the subslices. /// /// # Examples /// /// ``` /// let slice = [10, 40, 33, 20]; /// let mut iter = slice.split(|num| num % 3 == 0); /// /// assert_eq!(iter.next().unwrap(), &[10, 40]); /// assert_eq!(iter.next().unwrap(), &[20]); /// assert!(iter.next().is_none()); /// ``` /// /// If the first element is matched, an empty slice will be the first item /// returned by the iterator. Similarly, if the last element in the slice /// is matched, an empty slice will be the last item returned by the /// iterator: /// /// ``` /// let slice = [10, 40, 33]; /// let mut iter = slice.split(|num| num % 3 == 0); /// /// assert_eq!(iter.next().unwrap(), &[10, 40]); /// assert_eq!(iter.next().unwrap(), &[]); /// assert!(iter.next().is_none()); /// ``` /// /// If two matched elements are directly adjacent, an empty slice will be /// present between them: /// /// ``` /// let slice = [10, 6, 33, 20]; /// let mut iter = slice.split(|num| num % 3 == 0); /// /// assert_eq!(iter.next().unwrap(), &[10]); /// assert_eq!(iter.next().unwrap(), &[]); /// assert_eq!(iter.next().unwrap(), &[20]); /// assert!(iter.next().is_none()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn split(&self, pred: F) -> Split<'_, T, F> where F: FnMut(&T) -> bool, { Split { v: self, pred, finished: false } } /// Returns an iterator over mutable subslices separated by elements that /// match `pred`. The matched element is not contained in the subslices. /// /// # Examples /// /// ``` /// let mut v = [10, 40, 30, 20, 60, 50]; /// /// for group in v.split_mut(|num| *num % 3 == 0) { /// group[0] = 1; /// } /// assert_eq!(v, [1, 40, 30, 1, 60, 1]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn split_mut(&mut self, pred: F) -> SplitMut<'_, T, F> where F: FnMut(&T) -> bool, { SplitMut { v: self, pred, finished: false } } /// Returns an iterator over subslices separated by elements that match /// `pred`. The matched element is contained in the end of the previous /// subslice as a terminator. /// /// # Examples /// /// ``` /// #![feature(split_inclusive)] /// let slice = [10, 40, 33, 20]; /// let mut iter = slice.split_inclusive(|num| num % 3 == 0); /// /// assert_eq!(iter.next().unwrap(), &[10, 40, 33]); /// assert_eq!(iter.next().unwrap(), &[20]); /// assert!(iter.next().is_none()); /// ``` /// /// If the last element of the slice is matched, /// that element will be considered the terminator of the preceding slice. /// That slice will be the last item returned by the iterator. /// /// ``` /// #![feature(split_inclusive)] /// let slice = [3, 10, 40, 33]; /// let mut iter = slice.split_inclusive(|num| num % 3 == 0); /// /// assert_eq!(iter.next().unwrap(), &[3]); /// assert_eq!(iter.next().unwrap(), &[10, 40, 33]); /// assert!(iter.next().is_none()); /// ``` #[unstable(feature = "split_inclusive", issue = "72360")] #[inline] pub fn split_inclusive(&self, pred: F) -> SplitInclusive<'_, T, F> where F: FnMut(&T) -> bool, { SplitInclusive { v: self, pred, finished: false } } /// Returns an iterator over mutable subslices separated by elements that /// match `pred`. The matched element is contained in the previous /// subslice as a terminator. /// /// # Examples /// /// ``` /// #![feature(split_inclusive)] /// let mut v = [10, 40, 30, 20, 60, 50]; /// /// for group in v.split_inclusive_mut(|num| *num % 3 == 0) { /// let terminator_idx = group.len()-1; /// group[terminator_idx] = 1; /// } /// assert_eq!(v, [10, 40, 1, 20, 1, 1]); /// ``` #[unstable(feature = "split_inclusive", issue = "72360")] #[inline] pub fn split_inclusive_mut(&mut self, pred: F) -> SplitInclusiveMut<'_, T, F> where F: FnMut(&T) -> bool, { SplitInclusiveMut { v: self, pred, finished: false } } /// Returns an iterator over subslices separated by elements that match /// `pred`, starting at the end of the slice and working backwards. /// The matched element is not contained in the subslices. /// /// # Examples /// /// ``` /// let slice = [11, 22, 33, 0, 44, 55]; /// let mut iter = slice.rsplit(|num| *num == 0); /// /// assert_eq!(iter.next().unwrap(), &[44, 55]); /// assert_eq!(iter.next().unwrap(), &[11, 22, 33]); /// assert_eq!(iter.next(), None); /// ``` /// /// As with `split()`, if the first or last element is matched, an empty /// slice will be the first (or last) item returned by the iterator. /// /// ``` /// let v = &[0, 1, 1, 2, 3, 5, 8]; /// let mut it = v.rsplit(|n| *n % 2 == 0); /// assert_eq!(it.next().unwrap(), &[]); /// assert_eq!(it.next().unwrap(), &[3, 5]); /// assert_eq!(it.next().unwrap(), &[1, 1]); /// assert_eq!(it.next().unwrap(), &[]); /// assert_eq!(it.next(), None); /// ``` #[stable(feature = "slice_rsplit", since = "1.27.0")] #[inline] pub fn rsplit(&self, pred: F) -> RSplit<'_, T, F> where F: FnMut(&T) -> bool, { RSplit { inner: self.split(pred) } } /// Returns an iterator over mutable subslices separated by elements that /// match `pred`, starting at the end of the slice and working /// backwards. The matched element is not contained in the subslices. /// /// # Examples /// /// ``` /// let mut v = [100, 400, 300, 200, 600, 500]; /// /// let mut count = 0; /// for group in v.rsplit_mut(|num| *num % 3 == 0) { /// count += 1; /// group[0] = count; /// } /// assert_eq!(v, [3, 400, 300, 2, 600, 1]); /// ``` /// #[stable(feature = "slice_rsplit", since = "1.27.0")] #[inline] pub fn rsplit_mut(&mut self, pred: F) -> RSplitMut<'_, T, F> where F: FnMut(&T) -> bool, { RSplitMut { inner: self.split_mut(pred) } } /// Returns an iterator over subslices separated by elements that match /// `pred`, limited to returning at most `n` items. The matched element is /// not contained in the subslices. /// /// The last element returned, if any, will contain the remainder of the /// slice. /// /// # Examples /// /// Print the slice split once by numbers divisible by 3 (i.e., `[10, 40]`, /// `[20, 60, 50]`): /// /// ``` /// let v = [10, 40, 30, 20, 60, 50]; /// /// for group in v.splitn(2, |num| *num % 3 == 0) { /// println!("{:?}", group); /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn splitn(&self, n: usize, pred: F) -> SplitN<'_, T, F> where F: FnMut(&T) -> bool, { SplitN { inner: GenericSplitN { iter: self.split(pred), count: n } } } /// Returns an iterator over subslices separated by elements that match /// `pred`, limited to returning at most `n` items. The matched element is /// not contained in the subslices. /// /// The last element returned, if any, will contain the remainder of the /// slice. /// /// # Examples /// /// ``` /// let mut v = [10, 40, 30, 20, 60, 50]; /// /// for group in v.splitn_mut(2, |num| *num % 3 == 0) { /// group[0] = 1; /// } /// assert_eq!(v, [1, 40, 30, 1, 60, 50]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn splitn_mut(&mut self, n: usize, pred: F) -> SplitNMut<'_, T, F> where F: FnMut(&T) -> bool, { SplitNMut { inner: GenericSplitN { iter: self.split_mut(pred), count: n } } } /// Returns an iterator over subslices separated by elements that match /// `pred` limited to returning at most `n` items. This starts at the end of /// the slice and works backwards. The matched element is not contained in /// the subslices. /// /// The last element returned, if any, will contain the remainder of the /// slice. /// /// # Examples /// /// Print the slice split once, starting from the end, by numbers divisible /// by 3 (i.e., `[50]`, `[10, 40, 30, 20]`): /// /// ``` /// let v = [10, 40, 30, 20, 60, 50]; /// /// for group in v.rsplitn(2, |num| *num % 3 == 0) { /// println!("{:?}", group); /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn rsplitn(&self, n: usize, pred: F) -> RSplitN<'_, T, F> where F: FnMut(&T) -> bool, { RSplitN { inner: GenericSplitN { iter: self.rsplit(pred), count: n } } } /// Returns an iterator over subslices separated by elements that match /// `pred` limited to returning at most `n` items. This starts at the end of /// the slice and works backwards. The matched element is not contained in /// the subslices. /// /// The last element returned, if any, will contain the remainder of the /// slice. /// /// # Examples /// /// ``` /// let mut s = [10, 40, 30, 20, 60, 50]; /// /// for group in s.rsplitn_mut(2, |num| *num % 3 == 0) { /// group[0] = 1; /// } /// assert_eq!(s, [1, 40, 30, 20, 60, 1]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn rsplitn_mut(&mut self, n: usize, pred: F) -> RSplitNMut<'_, T, F> where F: FnMut(&T) -> bool, { RSplitNMut { inner: GenericSplitN { iter: self.rsplit_mut(pred), count: n } } } /// Returns `true` if the slice contains an element with the given value. /// /// # Examples /// /// ``` /// let v = [10, 40, 30]; /// assert!(v.contains(&30)); /// assert!(!v.contains(&50)); /// ``` /// /// If you do not have an `&T`, but just an `&U` such that `T: Borrow` /// (e.g. `String: Borrow`), you can use `iter().any`: /// /// ``` /// let v = [String::from("hello"), String::from("world")]; // slice of `String` /// assert!(v.iter().any(|e| e == "hello")); // search with `&str` /// assert!(!v.iter().any(|e| e == "hi")); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn contains(&self, x: &T) -> bool where T: PartialEq, { x.slice_contains(self) } /// Returns `true` if `needle` is a prefix of the slice. /// /// # Examples /// /// ``` /// let v = [10, 40, 30]; /// assert!(v.starts_with(&[10])); /// assert!(v.starts_with(&[10, 40])); /// assert!(!v.starts_with(&[50])); /// assert!(!v.starts_with(&[10, 50])); /// ``` /// /// Always returns `true` if `needle` is an empty slice: /// /// ``` /// let v = &[10, 40, 30]; /// assert!(v.starts_with(&[])); /// let v: &[u8] = &[]; /// assert!(v.starts_with(&[])); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn starts_with(&self, needle: &[T]) -> bool where T: PartialEq, { let n = needle.len(); self.len() >= n && needle == &self[..n] } /// Returns `true` if `needle` is a suffix of the slice. /// /// # Examples /// /// ``` /// let v = [10, 40, 30]; /// assert!(v.ends_with(&[30])); /// assert!(v.ends_with(&[40, 30])); /// assert!(!v.ends_with(&[50])); /// assert!(!v.ends_with(&[50, 30])); /// ``` /// /// Always returns `true` if `needle` is an empty slice: /// /// ``` /// let v = &[10, 40, 30]; /// assert!(v.ends_with(&[])); /// let v: &[u8] = &[]; /// assert!(v.ends_with(&[])); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn ends_with(&self, needle: &[T]) -> bool where T: PartialEq, { let (m, n) = (self.len(), needle.len()); m >= n && needle == &self[m - n..] } /// Returns a subslice with the prefix removed. /// /// This method returns [`None`] if slice does not start with `prefix`. /// Also it returns the original slice if `prefix` is an empty slice. /// /// # Examples /// /// ``` /// #![feature(slice_strip)] /// let v = &[10, 40, 30]; /// assert_eq!(v.strip_prefix(&[10]), Some(&[40, 30][..])); /// assert_eq!(v.strip_prefix(&[10, 40]), Some(&[30][..])); /// assert_eq!(v.strip_prefix(&[50]), None); /// assert_eq!(v.strip_prefix(&[10, 50]), None); /// ``` #[must_use = "returns the subslice without modifying the original"] #[unstable(feature = "slice_strip", issue = "73413")] pub fn strip_prefix(&self, prefix: &[T]) -> Option<&[T]> where T: PartialEq, { let n = prefix.len(); if n <= self.len() { let (head, tail) = self.split_at(n); if head == prefix { return Some(tail); } } None } /// Returns a subslice with the suffix removed. /// /// This method returns [`None`] if slice does not end with `suffix`. /// Also it returns the original slice if `suffix` is an empty slice /// /// # Examples /// /// ``` /// #![feature(slice_strip)] /// let v = &[10, 40, 30]; /// assert_eq!(v.strip_suffix(&[30]), Some(&[10, 40][..])); /// assert_eq!(v.strip_suffix(&[40, 30]), Some(&[10][..])); /// assert_eq!(v.strip_suffix(&[50]), None); /// assert_eq!(v.strip_suffix(&[50, 30]), None); /// ``` #[must_use = "returns the subslice without modifying the original"] #[unstable(feature = "slice_strip", issue = "73413")] pub fn strip_suffix(&self, suffix: &[T]) -> Option<&[T]> where T: PartialEq, { let (len, n) = (self.len(), suffix.len()); if n <= len { let (head, tail) = self.split_at(len - n); if tail == suffix { return Some(head); } } None } /// Binary searches this sorted slice for a given element. /// /// If the value is found then [`Result::Ok`] is returned, containing the /// index of the matching element. If there are multiple matches, then any /// one of the matches could be returned. If the value is not found then /// [`Result::Err`] is returned, containing the index where a matching /// element could be inserted while maintaining sorted order. /// /// # Examples /// /// Looks up a series of four elements. The first is found, with a /// uniquely determined position; the second and third are not /// found; the fourth could match any position in `[1, 4]`. /// /// ``` /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55]; /// /// assert_eq!(s.binary_search(&13), Ok(9)); /// assert_eq!(s.binary_search(&4), Err(7)); /// assert_eq!(s.binary_search(&100), Err(13)); /// let r = s.binary_search(&1); /// assert!(match r { Ok(1..=4) => true, _ => false, }); /// ``` /// /// If you want to insert an item to a sorted vector, while maintaining /// sort order: /// /// ``` /// let mut s = vec![0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55]; /// let num = 42; /// let idx = s.binary_search(&num).unwrap_or_else(|x| x); /// s.insert(idx, num); /// assert_eq!(s, [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 42, 55]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn binary_search(&self, x: &T) -> Result where T: Ord, { self.binary_search_by(|p| p.cmp(x)) } /// Binary searches this sorted slice with a comparator function. /// /// The comparator function should implement an order consistent /// with the sort order of the underlying slice, returning an /// order code that indicates whether its argument is `Less`, /// `Equal` or `Greater` the desired target. /// /// If the value is found then [`Result::Ok`] is returned, containing the /// index of the matching element. If there are multiple matches, then any /// one of the matches could be returned. If the value is not found then /// [`Result::Err`] is returned, containing the index where a matching /// element could be inserted while maintaining sorted order. /// /// # Examples /// /// Looks up a series of four elements. The first is found, with a /// uniquely determined position; the second and third are not /// found; the fourth could match any position in `[1, 4]`. /// /// ``` /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55]; /// /// let seek = 13; /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Ok(9)); /// let seek = 4; /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(7)); /// let seek = 100; /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(13)); /// let seek = 1; /// let r = s.binary_search_by(|probe| probe.cmp(&seek)); /// assert!(match r { Ok(1..=4) => true, _ => false, }); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result where F: FnMut(&'a T) -> Ordering, { let s = self; let mut size = s.len(); if size == 0 { return Err(0); } let mut base = 0usize; while size > 1 { let half = size / 2; let mid = base + half; // SAFETY: the call is made safe by the following inconstants: // - `mid >= 0`: by definition // - `mid < size`: `mid = size / 2 + size / 4 + size / 8 ...` let cmp = f(unsafe { s.get_unchecked(mid) }); base = if cmp == Greater { base } else { mid }; size -= half; } // SAFETY: base is always in [0, size) because base <= mid. let cmp = f(unsafe { s.get_unchecked(base) }); if cmp == Equal { Ok(base) } else { Err(base + (cmp == Less) as usize) } } /// Binary searches this sorted slice with a key extraction function. /// /// Assumes that the slice is sorted by the key, for instance with /// [`sort_by_key`] using the same key extraction function. /// /// If the value is found then [`Result::Ok`] is returned, containing the /// index of the matching element. If there are multiple matches, then any /// one of the matches could be returned. If the value is not found then /// [`Result::Err`] is returned, containing the index where a matching /// element could be inserted while maintaining sorted order. /// /// [`sort_by_key`]: #method.sort_by_key /// /// # Examples /// /// Looks up a series of four elements in a slice of pairs sorted by /// their second elements. The first is found, with a uniquely /// determined position; the second and third are not found; the /// fourth could match any position in `[1, 4]`. /// /// ``` /// let s = [(0, 0), (2, 1), (4, 1), (5, 1), (3, 1), /// (1, 2), (2, 3), (4, 5), (5, 8), (3, 13), /// (1, 21), (2, 34), (4, 55)]; /// /// assert_eq!(s.binary_search_by_key(&13, |&(a,b)| b), Ok(9)); /// assert_eq!(s.binary_search_by_key(&4, |&(a,b)| b), Err(7)); /// assert_eq!(s.binary_search_by_key(&100, |&(a,b)| b), Err(13)); /// let r = s.binary_search_by_key(&1, |&(a,b)| b); /// assert!(match r { Ok(1..=4) => true, _ => false, }); /// ``` #[stable(feature = "slice_binary_search_by_key", since = "1.10.0")] #[inline] pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result where F: FnMut(&'a T) -> B, B: Ord, { self.binary_search_by(|k| f(k).cmp(b)) } /// Sorts the slice, but may not preserve the order of equal elements. /// /// This sort is unstable (i.e., may reorder equal elements), in-place /// (i.e., does not allocate), and *O*(*n* \* log(*n*)) worst-case. /// /// # Current implementation /// /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters, /// which combines the fast average case of randomized quicksort with the fast worst case of /// heapsort, while achieving linear time on slices with certain patterns. It uses some /// randomization to avoid degenerate cases, but with a fixed seed to always provide /// deterministic behavior. /// /// It is typically faster than stable sorting, except in a few special cases, e.g., when the /// slice consists of several concatenated sorted sequences. /// /// # Examples /// /// ``` /// let mut v = [-5, 4, 1, -3, 2]; /// /// v.sort_unstable(); /// assert!(v == [-5, -3, 1, 2, 4]); /// ``` /// /// [pdqsort]: https://github.com/orlp/pdqsort #[stable(feature = "sort_unstable", since = "1.20.0")] #[inline] pub fn sort_unstable(&mut self) where T: Ord, { sort::quicksort(self, |a, b| a.lt(b)); } /// Sorts the slice with a comparator function, but may not preserve the order of equal /// elements. /// /// This sort is unstable (i.e., may reorder equal elements), in-place /// (i.e., does not allocate), and *O*(*n* \* log(*n*)) worst-case. /// /// The comparator function must define a total ordering for the elements in the slice. If /// the ordering is not total, the order of the elements is unspecified. An order is a /// total order if it is (for all a, b and c): /// /// * total and antisymmetric: exactly one of a < b, a == b or a > b is true; and /// * transitive, a < b and b < c implies a < c. The same must hold for both == and >. /// /// For example, while [`f64`] doesn't implement [`Ord`] because `NaN != NaN`, we can use /// `partial_cmp` as our sort function when we know the slice doesn't contain a `NaN`. /// /// ``` /// let mut floats = [5f64, 4.0, 1.0, 3.0, 2.0]; /// floats.sort_unstable_by(|a, b| a.partial_cmp(b).unwrap()); /// assert_eq!(floats, [1.0, 2.0, 3.0, 4.0, 5.0]); /// ``` /// /// # Current implementation /// /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters, /// which combines the fast average case of randomized quicksort with the fast worst case of /// heapsort, while achieving linear time on slices with certain patterns. It uses some /// randomization to avoid degenerate cases, but with a fixed seed to always provide /// deterministic behavior. /// /// It is typically faster than stable sorting, except in a few special cases, e.g., when the /// slice consists of several concatenated sorted sequences. /// /// # Examples /// /// ``` /// let mut v = [5, 4, 1, 3, 2]; /// v.sort_unstable_by(|a, b| a.cmp(b)); /// assert!(v == [1, 2, 3, 4, 5]); /// /// // reverse sorting /// v.sort_unstable_by(|a, b| b.cmp(a)); /// assert!(v == [5, 4, 3, 2, 1]); /// ``` /// /// [pdqsort]: https://github.com/orlp/pdqsort #[stable(feature = "sort_unstable", since = "1.20.0")] #[inline] pub fn sort_unstable_by(&mut self, mut compare: F) where F: FnMut(&T, &T) -> Ordering, { sort::quicksort(self, |a, b| compare(a, b) == Ordering::Less); } /// Sorts the slice with a key extraction function, but may not preserve the order of equal /// elements. /// /// This sort is unstable (i.e., may reorder equal elements), in-place /// (i.e., does not allocate), and *O*(m \* *n* \* log(*n*)) worst-case, where the key function is /// *O*(*m*). /// /// # Current implementation /// /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters, /// which combines the fast average case of randomized quicksort with the fast worst case of /// heapsort, while achieving linear time on slices with certain patterns. It uses some /// randomization to avoid degenerate cases, but with a fixed seed to always provide /// deterministic behavior. /// /// Due to its key calling strategy, [`sort_unstable_by_key`](#method.sort_unstable_by_key) /// is likely to be slower than [`sort_by_cached_key`](#method.sort_by_cached_key) in /// cases where the key function is expensive. /// /// # Examples /// /// ``` /// let mut v = [-5i32, 4, 1, -3, 2]; /// /// v.sort_unstable_by_key(|k| k.abs()); /// assert!(v == [1, 2, -3, 4, -5]); /// ``` /// /// [pdqsort]: https://github.com/orlp/pdqsort #[stable(feature = "sort_unstable", since = "1.20.0")] #[inline] pub fn sort_unstable_by_key(&mut self, mut f: F) where F: FnMut(&T) -> K, K: Ord, { sort::quicksort(self, |a, b| f(a).lt(&f(b))); } /// Reorder the slice such that the element at `index` is at its final sorted position. /// /// This reordering has the additional property that any value at position `i < index` will be /// less than or equal to any value at a position `j > index`. Additionally, this reordering is /// unstable (i.e. any number of equal elements may end up at position `index`), in-place /// (i.e. does not allocate), and *O*(*n*) worst-case. This function is also/ known as "kth /// element" in other libraries. It returns a triplet of the following values: all elements less /// than the one at the given index, the value at the given index, and all elements greater than /// the one at the given index. /// /// # Current implementation /// /// The current algorithm is based on the quickselect portion of the same quicksort algorithm /// used for [`sort_unstable`]. /// /// [`sort_unstable`]: #method.sort_unstable /// /// # Panics /// /// Panics when `index >= len()`, meaning it always panics on empty slices. /// /// # Examples /// /// ``` /// #![feature(slice_partition_at_index)] /// /// let mut v = [-5i32, 4, 1, -3, 2]; /// /// // Find the median /// v.partition_at_index(2); /// /// // We are only guaranteed the slice will be one of the following, based on the way we sort /// // about the specified index. /// assert!(v == [-3, -5, 1, 2, 4] || /// v == [-5, -3, 1, 2, 4] || /// v == [-3, -5, 1, 4, 2] || /// v == [-5, -3, 1, 4, 2]); /// ``` #[unstable(feature = "slice_partition_at_index", issue = "55300")] #[inline] pub fn partition_at_index(&mut self, index: usize) -> (&mut [T], &mut T, &mut [T]) where T: Ord, { let mut f = |a: &T, b: &T| a.lt(b); sort::partition_at_index(self, index, &mut f) } /// Reorder the slice with a comparator function such that the element at `index` is at its /// final sorted position. /// /// This reordering has the additional property that any value at position `i < index` will be /// less than or equal to any value at a position `j > index` using the comparator function. /// Additionally, this reordering is unstable (i.e. any number of equal elements may end up at /// position `index`), in-place (i.e. does not allocate), and *O*(*n*) worst-case. This function /// is also known as "kth element" in other libraries. It returns a triplet of the following /// values: all elements less than the one at the given index, the value at the given index, /// and all elements greater than the one at the given index, using the provided comparator /// function. /// /// # Current implementation /// /// The current algorithm is based on the quickselect portion of the same quicksort algorithm /// used for [`sort_unstable`]. /// /// [`sort_unstable`]: #method.sort_unstable /// /// # Panics /// /// Panics when `index >= len()`, meaning it always panics on empty slices. /// /// # Examples /// /// ``` /// #![feature(slice_partition_at_index)] /// /// let mut v = [-5i32, 4, 1, -3, 2]; /// /// // Find the median as if the slice were sorted in descending order. /// v.partition_at_index_by(2, |a, b| b.cmp(a)); /// /// // We are only guaranteed the slice will be one of the following, based on the way we sort /// // about the specified index. /// assert!(v == [2, 4, 1, -5, -3] || /// v == [2, 4, 1, -3, -5] || /// v == [4, 2, 1, -5, -3] || /// v == [4, 2, 1, -3, -5]); /// ``` #[unstable(feature = "slice_partition_at_index", issue = "55300")] #[inline] pub fn partition_at_index_by( &mut self, index: usize, mut compare: F, ) -> (&mut [T], &mut T, &mut [T]) where F: FnMut(&T, &T) -> Ordering, { let mut f = |a: &T, b: &T| compare(a, b) == Less; sort::partition_at_index(self, index, &mut f) } /// Reorder the slice with a key extraction function such that the element at `index` is at its /// final sorted position. /// /// This reordering has the additional property that any value at position `i < index` will be /// less than or equal to any value at a position `j > index` using the key extraction function. /// Additionally, this reordering is unstable (i.e. any number of equal elements may end up at /// position `index`), in-place (i.e. does not allocate), and *O*(*n*) worst-case. This function /// is also known as "kth element" in other libraries. It returns a triplet of the following /// values: all elements less than the one at the given index, the value at the given index, and /// all elements greater than the one at the given index, using the provided key extraction /// function. /// /// # Current implementation /// /// The current algorithm is based on the quickselect portion of the same quicksort algorithm /// used for [`sort_unstable`]. /// /// [`sort_unstable`]: #method.sort_unstable /// /// # Panics /// /// Panics when `index >= len()`, meaning it always panics on empty slices. /// /// # Examples /// /// ``` /// #![feature(slice_partition_at_index)] /// /// let mut v = [-5i32, 4, 1, -3, 2]; /// /// // Return the median as if the array were sorted according to absolute value. /// v.partition_at_index_by_key(2, |a| a.abs()); /// /// // We are only guaranteed the slice will be one of the following, based on the way we sort /// // about the specified index. /// assert!(v == [1, 2, -3, 4, -5] || /// v == [1, 2, -3, -5, 4] || /// v == [2, 1, -3, 4, -5] || /// v == [2, 1, -3, -5, 4]); /// ``` #[unstable(feature = "slice_partition_at_index", issue = "55300")] #[inline] pub fn partition_at_index_by_key( &mut self, index: usize, mut f: F, ) -> (&mut [T], &mut T, &mut [T]) where F: FnMut(&T) -> K, K: Ord, { let mut g = |a: &T, b: &T| f(a).lt(&f(b)); sort::partition_at_index(self, index, &mut g) } /// Moves all consecutive repeated elements to the end of the slice according to the /// [`PartialEq`] trait implementation. /// /// Returns two slices. The first contains no consecutive repeated elements. /// The second contains all the duplicates in no specified order. /// /// If the slice is sorted, the first returned slice contains no duplicates. /// /// # Examples /// /// ``` /// #![feature(slice_partition_dedup)] /// /// let mut slice = [1, 2, 2, 3, 3, 2, 1, 1]; /// /// let (dedup, duplicates) = slice.partition_dedup(); /// /// assert_eq!(dedup, [1, 2, 3, 2, 1]); /// assert_eq!(duplicates, [2, 3, 1]); /// ``` #[unstable(feature = "slice_partition_dedup", issue = "54279")] #[inline] pub fn partition_dedup(&mut self) -> (&mut [T], &mut [T]) where T: PartialEq, { self.partition_dedup_by(|a, b| a == b) } /// Moves all but the first of consecutive elements to the end of the slice satisfying /// a given equality relation. /// /// Returns two slices. The first contains no consecutive repeated elements. /// The second contains all the duplicates in no specified order. /// /// The `same_bucket` function is passed references to two elements from the slice and /// must determine if the elements compare equal. The elements are passed in opposite order /// from their order in the slice, so if `same_bucket(a, b)` returns `true`, `a` is moved /// at the end of the slice. /// /// If the slice is sorted, the first returned slice contains no duplicates. /// /// # Examples /// /// ``` /// #![feature(slice_partition_dedup)] /// /// let mut slice = ["foo", "Foo", "BAZ", "Bar", "bar", "baz", "BAZ"]; /// /// let (dedup, duplicates) = slice.partition_dedup_by(|a, b| a.eq_ignore_ascii_case(b)); /// /// assert_eq!(dedup, ["foo", "BAZ", "Bar", "baz"]); /// assert_eq!(duplicates, ["bar", "Foo", "BAZ"]); /// ``` #[unstable(feature = "slice_partition_dedup", issue = "54279")] #[inline] pub fn partition_dedup_by(&mut self, mut same_bucket: F) -> (&mut [T], &mut [T]) where F: FnMut(&mut T, &mut T) -> bool, { // Although we have a mutable reference to `self`, we cannot make // *arbitrary* changes. The `same_bucket` calls could panic, so we // must ensure that the slice is in a valid state at all times. // // The way that we handle this is by using swaps; we iterate // over all the elements, swapping as we go so that at the end // the elements we wish to keep are in the front, and those we // wish to reject are at the back. We can then split the slice. // This operation is still `O(n)`. // // Example: We start in this state, where `r` represents "next // read" and `w` represents "next_write`. // // r // +---+---+---+---+---+---+ // | 0 | 1 | 1 | 2 | 3 | 3 | // +---+---+---+---+---+---+ // w // // Comparing self[r] against self[w-1], this is not a duplicate, so // we swap self[r] and self[w] (no effect as r==w) and then increment both // r and w, leaving us with: // // r // +---+---+---+---+---+---+ // | 0 | 1 | 1 | 2 | 3 | 3 | // +---+---+---+---+---+---+ // w // // Comparing self[r] against self[w-1], this value is a duplicate, // so we increment `r` but leave everything else unchanged: // // r // +---+---+---+---+---+---+ // | 0 | 1 | 1 | 2 | 3 | 3 | // +---+---+---+---+---+---+ // w // // Comparing self[r] against self[w-1], this is not a duplicate, // so swap self[r] and self[w] and advance r and w: // // r // +---+---+---+---+---+---+ // | 0 | 1 | 2 | 1 | 3 | 3 | // +---+---+---+---+---+---+ // w // // Not a duplicate, repeat: // // r // +---+---+---+---+---+---+ // | 0 | 1 | 2 | 3 | 1 | 3 | // +---+---+---+---+---+---+ // w // // Duplicate, advance r. End of slice. Split at w. let len = self.len(); if len <= 1 { return (self, &mut []); } let ptr = self.as_mut_ptr(); let mut next_read: usize = 1; let mut next_write: usize = 1; // SAFETY: the `while` condition guarantees `next_read` and `next_write` // are less than `len`, thus are inside `self`. `prev_ptr_write` points to // one element before `ptr_write`, but `next_write` starts at 1, so // `prev_ptr_write` is never less than 0 and is inside the slice. // This fulfils the requirements for dereferencing `ptr_read`, `prev_ptr_write` // and `ptr_write`, and for using `ptr.add(next_read)`, `ptr.add(next_write - 1)` // and `prev_ptr_write.offset(1)`. // // `next_write` is also incremented at most once per loop at most meaning // no element is skipped when it may need to be swapped. // // `ptr_read` and `prev_ptr_write` never point to the same element. This // is required for `&mut *ptr_read`, `&mut *prev_ptr_write` to be safe. // The explanation is simply that `next_read >= next_write` is always true, // thus `next_read > next_write - 1` is too. unsafe { // Avoid bounds checks by using raw pointers. while next_read < len { let ptr_read = ptr.add(next_read); let prev_ptr_write = ptr.add(next_write - 1); if !same_bucket(&mut *ptr_read, &mut *prev_ptr_write) { if next_read != next_write { let ptr_write = prev_ptr_write.offset(1); mem::swap(&mut *ptr_read, &mut *ptr_write); } next_write += 1; } next_read += 1; } } self.split_at_mut(next_write) } /// Moves all but the first of consecutive elements to the end of the slice that resolve /// to the same key. /// /// Returns two slices. The first contains no consecutive repeated elements. /// The second contains all the duplicates in no specified order. /// /// If the slice is sorted, the first returned slice contains no duplicates. /// /// # Examples /// /// ``` /// #![feature(slice_partition_dedup)] /// /// let mut slice = [10, 20, 21, 30, 30, 20, 11, 13]; /// /// let (dedup, duplicates) = slice.partition_dedup_by_key(|i| *i / 10); /// /// assert_eq!(dedup, [10, 20, 30, 20, 11]); /// assert_eq!(duplicates, [21, 30, 13]); /// ``` #[unstable(feature = "slice_partition_dedup", issue = "54279")] #[inline] pub fn partition_dedup_by_key(&mut self, mut key: F) -> (&mut [T], &mut [T]) where F: FnMut(&mut T) -> K, K: PartialEq, { self.partition_dedup_by(|a, b| key(a) == key(b)) } /// Rotates the slice in-place such that the first `mid` elements of the /// slice move to the end while the last `self.len() - mid` elements move to /// the front. After calling `rotate_left`, the element previously at index /// `mid` will become the first element in the slice. /// /// # Panics /// /// This function will panic if `mid` is greater than the length of the /// slice. Note that `mid == self.len()` does _not_ panic and is a no-op /// rotation. /// /// # Complexity /// /// Takes linear (in `self.len()`) time. /// /// # Examples /// /// ``` /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f']; /// a.rotate_left(2); /// assert_eq!(a, ['c', 'd', 'e', 'f', 'a', 'b']); /// ``` /// /// Rotating a subslice: /// /// ``` /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f']; /// a[1..5].rotate_left(1); /// assert_eq!(a, ['a', 'c', 'd', 'e', 'b', 'f']); /// ``` #[stable(feature = "slice_rotate", since = "1.26.0")] pub fn rotate_left(&mut self, mid: usize) { assert!(mid <= self.len()); let k = self.len() - mid; let p = self.as_mut_ptr(); // SAFETY: The range `[p.add(mid) - mid, p.add(mid) + k)` is trivially // valid for reading and writing, as required by `ptr_rotate`. unsafe { rotate::ptr_rotate(mid, p.add(mid), k); } } /// Rotates the slice in-place such that the first `self.len() - k` /// elements of the slice move to the end while the last `k` elements move /// to the front. After calling `rotate_right`, the element previously at /// index `self.len() - k` will become the first element in the slice. /// /// # Panics /// /// This function will panic if `k` is greater than the length of the /// slice. Note that `k == self.len()` does _not_ panic and is a no-op /// rotation. /// /// # Complexity /// /// Takes linear (in `self.len()`) time. /// /// # Examples /// /// ``` /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f']; /// a.rotate_right(2); /// assert_eq!(a, ['e', 'f', 'a', 'b', 'c', 'd']); /// ``` /// /// Rotate a subslice: /// /// ``` /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f']; /// a[1..5].rotate_right(1); /// assert_eq!(a, ['a', 'e', 'b', 'c', 'd', 'f']); /// ``` #[stable(feature = "slice_rotate", since = "1.26.0")] pub fn rotate_right(&mut self, k: usize) { assert!(k <= self.len()); let mid = self.len() - k; let p = self.as_mut_ptr(); // SAFETY: The range `[p.add(mid) - mid, p.add(mid) + k)` is trivially // valid for reading and writing, as required by `ptr_rotate`. unsafe { rotate::ptr_rotate(mid, p.add(mid), k); } } /// Fills `self` with elements by cloning `value`. /// /// # Examples /// /// ``` /// #![feature(slice_fill)] /// /// let mut buf = vec![0; 10]; /// buf.fill(1); /// assert_eq!(buf, vec![1; 10]); /// ``` #[unstable(feature = "slice_fill", issue = "70758")] pub fn fill(&mut self, value: T) where T: Clone, { if let Some((last, elems)) = self.split_last_mut() { for el in elems { el.clone_from(&value); } *last = value } } /// Copies the elements from `src` into `self`. /// /// The length of `src` must be the same as `self`. /// /// If `T` implements `Copy`, it can be more performant to use /// [`copy_from_slice`]. /// /// # Panics /// /// This function will panic if the two slices have different lengths. /// /// # Examples /// /// Cloning two elements from a slice into another: /// /// ``` /// let src = [1, 2, 3, 4]; /// let mut dst = [0, 0]; /// /// // Because the slices have to be the same length, /// // we slice the source slice from four elements /// // to two. It will panic if we don't do this. /// dst.clone_from_slice(&src[2..]); /// /// assert_eq!(src, [1, 2, 3, 4]); /// assert_eq!(dst, [3, 4]); /// ``` /// /// Rust enforces that there can only be one mutable reference with no /// immutable references to a particular piece of data in a particular /// scope. Because of this, attempting to use `clone_from_slice` on a /// single slice will result in a compile failure: /// /// ```compile_fail /// let mut slice = [1, 2, 3, 4, 5]; /// /// slice[..2].clone_from_slice(&slice[3..]); // compile fail! /// ``` /// /// To work around this, we can use [`split_at_mut`] to create two distinct /// sub-slices from a slice: /// /// ``` /// let mut slice = [1, 2, 3, 4, 5]; /// /// { /// let (left, right) = slice.split_at_mut(2); /// left.clone_from_slice(&right[1..]); /// } /// /// assert_eq!(slice, [4, 5, 3, 4, 5]); /// ``` /// /// [`copy_from_slice`]: #method.copy_from_slice /// [`split_at_mut`]: #method.split_at_mut #[stable(feature = "clone_from_slice", since = "1.7.0")] pub fn clone_from_slice(&mut self, src: &[T]) where T: Clone, { assert!(self.len() == src.len(), "destination and source slices have different lengths"); // NOTE: We need to explicitly slice them to the same length // for bounds checking to be elided, and the optimizer will // generate memcpy for simple cases (for example T = u8). let len = self.len(); let src = &src[..len]; for i in 0..len { self[i].clone_from(&src[i]); } } /// Copies all elements from `src` into `self`, using a memcpy. /// /// The length of `src` must be the same as `self`. /// /// If `T` does not implement `Copy`, use [`clone_from_slice`]. /// /// # Panics /// /// This function will panic if the two slices have different lengths. /// /// # Examples /// /// Copying two elements from a slice into another: /// /// ``` /// let src = [1, 2, 3, 4]; /// let mut dst = [0, 0]; /// /// // Because the slices have to be the same length, /// // we slice the source slice from four elements /// // to two. It will panic if we don't do this. /// dst.copy_from_slice(&src[2..]); /// /// assert_eq!(src, [1, 2, 3, 4]); /// assert_eq!(dst, [3, 4]); /// ``` /// /// Rust enforces that there can only be one mutable reference with no /// immutable references to a particular piece of data in a particular /// scope. Because of this, attempting to use `copy_from_slice` on a /// single slice will result in a compile failure: /// /// ```compile_fail /// let mut slice = [1, 2, 3, 4, 5]; /// /// slice[..2].copy_from_slice(&slice[3..]); // compile fail! /// ``` /// /// To work around this, we can use [`split_at_mut`] to create two distinct /// sub-slices from a slice: /// /// ``` /// let mut slice = [1, 2, 3, 4, 5]; /// /// { /// let (left, right) = slice.split_at_mut(2); /// left.copy_from_slice(&right[1..]); /// } /// /// assert_eq!(slice, [4, 5, 3, 4, 5]); /// ``` /// /// [`clone_from_slice`]: #method.clone_from_slice /// [`split_at_mut`]: #method.split_at_mut #[stable(feature = "copy_from_slice", since = "1.9.0")] pub fn copy_from_slice(&mut self, src: &[T]) where T: Copy, { // The panic code path was put into a cold function to not bloat the // call site. #[inline(never)] #[cold] #[track_caller] fn len_mismatch_fail(dst_len: usize, src_len: usize) -> ! { panic!( "source slice length ({}) does not match destination slice length ({})", src_len, dst_len, ); } if self.len() != src.len() { len_mismatch_fail(self.len(), src.len()); } // SAFETY: `self` is valid for `self.len()` elements by definition, and `src` was // checked to have the same length. The slices cannot overlap because // mutable references are exclusive. unsafe { ptr::copy_nonoverlapping(src.as_ptr(), self.as_mut_ptr(), self.len()); } } /// Copies elements from one part of the slice to another part of itself, /// using a memmove. /// /// `src` is the range within `self` to copy from. `dest` is the starting /// index of the range within `self` to copy to, which will have the same /// length as `src`. The two ranges may overlap. The ends of the two ranges /// must be less than or equal to `self.len()`. /// /// # Panics /// /// This function will panic if either range exceeds the end of the slice, /// or if the end of `src` is before the start. /// /// # Examples /// /// Copying four bytes within a slice: /// /// ``` /// let mut bytes = *b"Hello, World!"; /// /// bytes.copy_within(1..5, 8); /// /// assert_eq!(&bytes, b"Hello, Wello!"); /// ``` #[stable(feature = "copy_within", since = "1.37.0")] #[track_caller] pub fn copy_within>(&mut self, src: R, dest: usize) where T: Copy, { let Range { start: src_start, end: src_end } = self.check_range(src); let count = src_end - src_start; assert!(dest <= self.len() - count, "dest is out of bounds"); // SAFETY: the conditions for `ptr::copy` have all been checked above, // as have those for `ptr::add`. unsafe { ptr::copy(self.as_ptr().add(src_start), self.as_mut_ptr().add(dest), count); } } /// Swaps all elements in `self` with those in `other`. /// /// The length of `other` must be the same as `self`. /// /// # Panics /// /// This function will panic if the two slices have different lengths. /// /// # Example /// /// Swapping two elements across slices: /// /// ``` /// let mut slice1 = [0, 0]; /// let mut slice2 = [1, 2, 3, 4]; /// /// slice1.swap_with_slice(&mut slice2[2..]); /// /// assert_eq!(slice1, [3, 4]); /// assert_eq!(slice2, [1, 2, 0, 0]); /// ``` /// /// Rust enforces that there can only be one mutable reference to a /// particular piece of data in a particular scope. Because of this, /// attempting to use `swap_with_slice` on a single slice will result in /// a compile failure: /// /// ```compile_fail /// let mut slice = [1, 2, 3, 4, 5]; /// slice[..2].swap_with_slice(&mut slice[3..]); // compile fail! /// ``` /// /// To work around this, we can use [`split_at_mut`] to create two distinct /// mutable sub-slices from a slice: /// /// ``` /// let mut slice = [1, 2, 3, 4, 5]; /// /// { /// let (left, right) = slice.split_at_mut(2); /// left.swap_with_slice(&mut right[1..]); /// } /// /// assert_eq!(slice, [4, 5, 3, 1, 2]); /// ``` /// /// [`split_at_mut`]: #method.split_at_mut #[stable(feature = "swap_with_slice", since = "1.27.0")] pub fn swap_with_slice(&mut self, other: &mut [T]) { assert!(self.len() == other.len(), "destination and source slices have different lengths"); // SAFETY: `self` is valid for `self.len()` elements by definition, and `src` was // checked to have the same length. The slices cannot overlap because // mutable references are exclusive. unsafe { ptr::swap_nonoverlapping(self.as_mut_ptr(), other.as_mut_ptr(), self.len()); } } /// Function to calculate lengths of the middle and trailing slice for `align_to{,_mut}`. fn align_to_offsets(&self) -> (usize, usize) { // What we gonna do about `rest` is figure out what multiple of `U`s we can put in a // lowest number of `T`s. And how many `T`s we need for each such "multiple". // // Consider for example T=u8 U=u16. Then we can put 1 U in 2 Ts. Simple. Now, consider // for example a case where size_of:: = 16, size_of:: = 24. We can put 2 Us in // place of every 3 Ts in the `rest` slice. A bit more complicated. // // Formula to calculate this is: // // Us = lcm(size_of::, size_of::) / size_of:: // Ts = lcm(size_of::, size_of::) / size_of:: // // Expanded and simplified: // // Us = size_of:: / gcd(size_of::, size_of::) // Ts = size_of:: / gcd(size_of::, size_of::) // // Luckily since all this is constant-evaluated... performance here matters not! #[inline] fn gcd(a: usize, b: usize) -> usize { use crate::intrinsics; // iterative stein’s algorithm // We should still make this `const fn` (and revert to recursive algorithm if we do) // because relying on llvm to consteval all this is… well, it makes me uncomfortable. // SAFETY: `a` and `b` are checked to be non-zero values. let (ctz_a, mut ctz_b) = unsafe { if a == 0 { return b; } if b == 0 { return a; } (intrinsics::cttz_nonzero(a), intrinsics::cttz_nonzero(b)) }; let k = ctz_a.min(ctz_b); let mut a = a >> ctz_a; let mut b = b; loop { // remove all factors of 2 from b b >>= ctz_b; if a > b { mem::swap(&mut a, &mut b); } b = b - a; // SAFETY: `b` is checked to be non-zero. unsafe { if b == 0 { break; } ctz_b = intrinsics::cttz_nonzero(b); } } a << k } let gcd: usize = gcd(mem::size_of::(), mem::size_of::()); let ts: usize = mem::size_of::() / gcd; let us: usize = mem::size_of::() / gcd; // Armed with this knowledge, we can find how many `U`s we can fit! let us_len = self.len() / ts * us; // And how many `T`s will be in the trailing slice! let ts_len = self.len() % ts; (us_len, ts_len) } /// Transmute the slice to a slice of another type, ensuring alignment of the types is /// maintained. /// /// This method splits the slice into three distinct slices: prefix, correctly aligned middle /// slice of a new type, and the suffix slice. The method may make the middle slice the greatest /// length possible for a given type and input slice, but only your algorithm's performance /// should depend on that, not its correctness. It is permissible for all of the input data to /// be returned as the prefix or suffix slice. /// /// This method has no purpose when either input element `T` or output element `U` are /// zero-sized and will return the original slice without splitting anything. /// /// # Safety /// /// This method is essentially a `transmute` with respect to the elements in the returned /// middle slice, so all the usual caveats pertaining to `transmute::` also apply here. /// /// # Examples /// /// Basic usage: /// /// ``` /// unsafe { /// let bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7]; /// let (prefix, shorts, suffix) = bytes.align_to::(); /// // less_efficient_algorithm_for_bytes(prefix); /// // more_efficient_algorithm_for_aligned_shorts(shorts); /// // less_efficient_algorithm_for_bytes(suffix); /// } /// ``` #[stable(feature = "slice_align_to", since = "1.30.0")] pub unsafe fn align_to(&self) -> (&[T], &[U], &[T]) { // Note that most of this function will be constant-evaluated, if mem::size_of::() == 0 || mem::size_of::() == 0 { // handle ZSTs specially, which is – don't handle them at all. return (self, &[], &[]); } // First, find at what point do we split between the first and 2nd slice. Easy with // ptr.align_offset. let ptr = self.as_ptr(); // SAFETY: See the `align_to_mut` method for the detailed safety comment. let offset = unsafe { crate::ptr::align_offset(ptr, mem::align_of::()) }; if offset > self.len() { (self, &[], &[]) } else { let (left, rest) = self.split_at(offset); let (us_len, ts_len) = rest.align_to_offsets::(); // SAFETY: now `rest` is definitely aligned, so `from_raw_parts` below is okay, // since the caller guarantees that we can transmute `T` to `U` safely. unsafe { ( left, from_raw_parts(rest.as_ptr() as *const U, us_len), from_raw_parts(rest.as_ptr().add(rest.len() - ts_len), ts_len), ) } } } /// Transmute the slice to a slice of another type, ensuring alignment of the types is /// maintained. /// /// This method splits the slice into three distinct slices: prefix, correctly aligned middle /// slice of a new type, and the suffix slice. The method may make the middle slice the greatest /// length possible for a given type and input slice, but only your algorithm's performance /// should depend on that, not its correctness. It is permissible for all of the input data to /// be returned as the prefix or suffix slice. /// /// This method has no purpose when either input element `T` or output element `U` are /// zero-sized and will return the original slice without splitting anything. /// /// # Safety /// /// This method is essentially a `transmute` with respect to the elements in the returned /// middle slice, so all the usual caveats pertaining to `transmute::` also apply here. /// /// # Examples /// /// Basic usage: /// /// ``` /// unsafe { /// let mut bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7]; /// let (prefix, shorts, suffix) = bytes.align_to_mut::(); /// // less_efficient_algorithm_for_bytes(prefix); /// // more_efficient_algorithm_for_aligned_shorts(shorts); /// // less_efficient_algorithm_for_bytes(suffix); /// } /// ``` #[stable(feature = "slice_align_to", since = "1.30.0")] pub unsafe fn align_to_mut(&mut self) -> (&mut [T], &mut [U], &mut [T]) { // Note that most of this function will be constant-evaluated, if mem::size_of::() == 0 || mem::size_of::() == 0 { // handle ZSTs specially, which is – don't handle them at all. return (self, &mut [], &mut []); } // First, find at what point do we split between the first and 2nd slice. Easy with // ptr.align_offset. let ptr = self.as_ptr(); // SAFETY: Here we are ensuring we will use aligned pointers for U for the // rest of the method. This is done by passing a pointer to &[T] with an // alignment targeted for U. // `crate::ptr::align_offset` is called with a correctly aligned and // valid pointer `ptr` (it comes from a reference to `self`) and with // a size that is a power of two (since it comes from the alignement for U), // satisfying its safety constraints. let offset = unsafe { crate::ptr::align_offset(ptr, mem::align_of::()) }; if offset > self.len() { (self, &mut [], &mut []) } else { let (left, rest) = self.split_at_mut(offset); let (us_len, ts_len) = rest.align_to_offsets::(); let rest_len = rest.len(); let mut_ptr = rest.as_mut_ptr(); // We can't use `rest` again after this, that would invalidate its alias `mut_ptr`! // SAFETY: see comments for `align_to`. unsafe { ( left, from_raw_parts_mut(mut_ptr as *mut U, us_len), from_raw_parts_mut(mut_ptr.add(rest_len - ts_len), ts_len), ) } } } /// Checks if the elements of this slice are sorted. /// /// That is, for each element `a` and its following element `b`, `a <= b` must hold. If the /// slice yields exactly zero or one element, `true` is returned. /// /// Note that if `Self::Item` is only `PartialOrd`, but not `Ord`, the above definition /// implies that this function returns `false` if any two consecutive items are not /// comparable. /// /// # Examples /// /// ``` /// #![feature(is_sorted)] /// let empty: [i32; 0] = []; /// /// assert!([1, 2, 2, 9].is_sorted()); /// assert!(![1, 3, 2, 4].is_sorted()); /// assert!([0].is_sorted()); /// assert!(empty.is_sorted()); /// assert!(![0.0, 1.0, f32::NAN].is_sorted()); /// ``` #[inline] #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")] pub fn is_sorted(&self) -> bool where T: PartialOrd, { self.is_sorted_by(|a, b| a.partial_cmp(b)) } /// Checks if the elements of this slice are sorted using the given comparator function. /// /// Instead of using `PartialOrd::partial_cmp`, this function uses the given `compare` /// function to determine the ordering of two elements. Apart from that, it's equivalent to /// [`is_sorted`]; see its documentation for more information. /// /// [`is_sorted`]: #method.is_sorted #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")] pub fn is_sorted_by(&self, mut compare: F) -> bool where F: FnMut(&T, &T) -> Option, { self.iter().is_sorted_by(|a, b| compare(*a, *b)) } /// Checks if the elements of this slice are sorted using the given key extraction function. /// /// Instead of comparing the slice's elements directly, this function compares the keys of the /// elements, as determined by `f`. Apart from that, it's equivalent to [`is_sorted`]; see its /// documentation for more information. /// /// [`is_sorted`]: #method.is_sorted /// /// # Examples /// /// ``` /// #![feature(is_sorted)] /// /// assert!(["c", "bb", "aaa"].is_sorted_by_key(|s| s.len())); /// assert!(![-2i32, -1, 0, 3].is_sorted_by_key(|n| n.abs())); /// ``` #[inline] #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")] pub fn is_sorted_by_key(&self, f: F) -> bool where F: FnMut(&T) -> K, K: PartialOrd, { self.iter().is_sorted_by_key(f) } /// Returns the index of the partition point according to the given predicate /// (the index of the first element of the second partition). /// /// The slice is assumed to be partitioned according to the given predicate. /// This means that all elements for which the predicate returns true are at the start of the slice /// and all elements for which the predicate returns false are at the end. /// For example, [7, 15, 3, 5, 4, 12, 6] is a partitioned under the predicate x % 2 != 0 /// (all odd numbers are at the start, all even at the end). /// /// If this slice is not partitioned, the returned result is unspecified and meaningless, /// as this method performs a kind of binary search. /// /// # Examples /// /// ``` /// #![feature(partition_point)] /// /// let v = [1, 2, 3, 3, 5, 6, 7]; /// let i = v.partition_point(|&x| x < 5); /// /// assert_eq!(i, 4); /// assert!(v[..i].iter().all(|&x| x < 5)); /// assert!(v[i..].iter().all(|&x| !(x < 5))); /// ``` #[unstable(feature = "partition_point", reason = "new API", issue = "73831")] pub fn partition_point

(&self, mut pred: P) -> usize where P: FnMut(&T) -> bool, { let mut left = 0; let mut right = self.len(); while left != right { let mid = left + (right - left) / 2; // SAFETY: When `left < right`, `left <= mid < right`. // Therefore `left` always increases and `right` always decreases, // and either of them is selected. In both cases `left <= right` is // satisfied. Therefore if `left < right` in a step, `left <= right` // is satisfied in the next step. Therefore as long as `left != right`, // `0 <= left < right <= len` is satisfied and if this case // `0 <= mid < len` is satisfied too. let value = unsafe { self.get_unchecked(mid) }; if pred(value) { left = mid + 1; } else { right = mid; } } left } } #[lang = "slice_u8"] #[cfg(not(test))] impl [u8] { /// Checks if all bytes in this slice are within the ASCII range. #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")] #[inline] pub fn is_ascii(&self) -> bool { is_ascii(self) } /// Checks that two slices are an ASCII case-insensitive match. /// /// Same as `to_ascii_lowercase(a) == to_ascii_lowercase(b)`, /// but without allocating and copying temporaries. #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")] #[inline] pub fn eq_ignore_ascii_case(&self, other: &[u8]) -> bool { self.len() == other.len() && self.iter().zip(other).all(|(a, b)| a.eq_ignore_ascii_case(b)) } /// Converts this slice to its ASCII upper case equivalent in-place. /// /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z', /// but non-ASCII letters are unchanged. /// /// To return a new uppercased value without modifying the existing one, use /// [`to_ascii_uppercase`]. /// /// [`to_ascii_uppercase`]: #method.to_ascii_uppercase #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")] #[inline] pub fn make_ascii_uppercase(&mut self) { for byte in self { byte.make_ascii_uppercase(); } } /// Converts this slice to its ASCII lower case equivalent in-place. /// /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z', /// but non-ASCII letters are unchanged. /// /// To return a new lowercased value without modifying the existing one, use /// [`to_ascii_lowercase`]. /// /// [`to_ascii_lowercase`]: #method.to_ascii_lowercase #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")] #[inline] pub fn make_ascii_lowercase(&mut self) { for byte in self { byte.make_ascii_lowercase(); } } } /// Returns `true` if any byte in the word `v` is nonascii (>= 128). Snarfed /// from `../str/mod.rs`, which does something similar for utf8 validation. #[inline] fn contains_nonascii(v: usize) -> bool { const NONASCII_MASK: usize = 0x80808080_80808080u64 as usize; (NONASCII_MASK & v) != 0 } /// Optimized ASCII test that will use usize-at-a-time operations instead of /// byte-at-a-time operations (when possible). /// /// The algorithm we use here is pretty simple. If `s` is too short, we just /// check each byte and be done with it. Otherwise: /// /// - Read the first word with an unaligned load. /// - Align the pointer, read subsequent words until end with aligned loads. /// - Read the last `usize` from `s` with an unaligned load. /// /// If any of these loads produces something for which `contains_nonascii` /// (above) returns true, then we know the answer is false. #[inline] fn is_ascii(s: &[u8]) -> bool { const USIZE_SIZE: usize = mem::size_of::(); let len = s.len(); let align_offset = s.as_ptr().align_offset(USIZE_SIZE); // If we wouldn't gain anything from the word-at-a-time implementation, fall // back to a scalar loop. // // We also do this for architectures where `size_of::()` isn't // sufficient alignment for `usize`, because it's a weird edge case. if len < USIZE_SIZE || len < align_offset || USIZE_SIZE < mem::align_of::() { return s.iter().all(|b| b.is_ascii()); } // We always read the first word unaligned, which means `align_offset` is // 0, we'd read the same value again for the aligned read. let offset_to_aligned = if align_offset == 0 { USIZE_SIZE } else { align_offset }; let start = s.as_ptr(); // SAFETY: We verify `len < USIZE_SIZE` above. let first_word = unsafe { (start as *const usize).read_unaligned() }; if contains_nonascii(first_word) { return false; } // We checked this above, somewhat implicitly. Note that `offset_to_aligned` // is either `align_offset` or `USIZE_SIZE`, both of are explicitly checked // above. debug_assert!(offset_to_aligned <= len); // SAFETY: word_ptr is the (properly aligned) usize ptr we use to read the // middle chunk of the slice. let mut word_ptr = unsafe { start.add(offset_to_aligned) as *const usize }; // `byte_pos` is the byte index of `word_ptr`, used for loop end checks. let mut byte_pos = offset_to_aligned; // Paranoia check about alignment, since we're about to do a bunch of // unaligned loads. In practice this should be impossible barring a bug in // `align_offset` though. debug_assert_eq!((word_ptr as usize) % mem::align_of::(), 0); // Read subsequent words until the last aligned word, excluding the last // aligned word by itself to be done in tail check later, to ensure that // tail is always one `usize` at most to extra branch `byte_pos == len`. while byte_pos < len - USIZE_SIZE { debug_assert!( // Sanity check that the read is in bounds (word_ptr as usize + USIZE_SIZE) <= (start.wrapping_add(len) as usize) && // And that our assumptions about `byte_pos` hold. (word_ptr as usize) - (start as usize) == byte_pos ); // Safety: We know `word_ptr` is properly aligned (because of // `align_offset`), and we know that we have enough bytes between `word_ptr` and the end let word = unsafe { word_ptr.read() }; if contains_nonascii(word) { return false; } byte_pos += USIZE_SIZE; // SAFETY: We know that `byte_pos <= len - USIZE_SIZE`, which means that // after this `add`, `word_ptr` will be at most one-past-the-end. word_ptr = unsafe { word_ptr.add(1) }; } // Sanity check to ensure there really is only one `usize` left. This should // be guaranteed by our loop condition. debug_assert!(byte_pos <= len && len - byte_pos <= USIZE_SIZE); // SAFETY: This relies on `len >= USIZE_SIZE`, which we check at the start. let last_word = unsafe { (start.add(len - USIZE_SIZE) as *const usize).read_unaligned() }; !contains_nonascii(last_word) } #[stable(feature = "rust1", since = "1.0.0")] impl ops::Index for [T] where I: SliceIndex<[T]>, { type Output = I::Output; #[inline] fn index(&self, index: I) -> &I::Output { index.index(self) } } #[stable(feature = "rust1", since = "1.0.0")] impl ops::IndexMut for [T] where I: SliceIndex<[T]>, { #[inline] fn index_mut(&mut self, index: I) -> &mut I::Output { index.index_mut(self) } } #[inline(never)] #[cold] #[track_caller] fn slice_start_index_len_fail(index: usize, len: usize) -> ! { panic!("range start index {} out of range for slice of length {}", index, len); } #[inline(never)] #[cold] #[track_caller] fn slice_end_index_len_fail(index: usize, len: usize) -> ! { panic!("range end index {} out of range for slice of length {}", index, len); } #[inline(never)] #[cold] #[track_caller] fn slice_index_order_fail(index: usize, end: usize) -> ! { panic!("slice index starts at {} but ends at {}", index, end); } #[inline(never)] #[cold] #[track_caller] fn slice_start_index_overflow_fail() -> ! { panic!("attempted to index slice from after maximum usize"); } #[inline(never)] #[cold] #[track_caller] fn slice_end_index_overflow_fail() -> ! { panic!("attempted to index slice up to maximum usize"); } mod private_slice_index { use super::ops; #[stable(feature = "slice_get_slice", since = "1.28.0")] pub trait Sealed {} #[stable(feature = "slice_get_slice", since = "1.28.0")] impl Sealed for usize {} #[stable(feature = "slice_get_slice", since = "1.28.0")] impl Sealed for ops::Range {} #[stable(feature = "slice_get_slice", since = "1.28.0")] impl Sealed for ops::RangeTo {} #[stable(feature = "slice_get_slice", since = "1.28.0")] impl Sealed for ops::RangeFrom {} #[stable(feature = "slice_get_slice", since = "1.28.0")] impl Sealed for ops::RangeFull {} #[stable(feature = "slice_get_slice", since = "1.28.0")] impl Sealed for ops::RangeInclusive {} #[stable(feature = "slice_get_slice", since = "1.28.0")] impl Sealed for ops::RangeToInclusive {} } /// A helper trait used for indexing operations. /// /// Implementations of this trait have to promise that if the argument /// to `get_(mut_)unchecked` is a safe reference, then so is the result. #[stable(feature = "slice_get_slice", since = "1.28.0")] #[rustc_on_unimplemented( on(T = "str", label = "string indices are ranges of `usize`",), on( all(any(T = "str", T = "&str", T = "std::string::String"), _Self = "{integer}"), note = "you can use `.chars().nth()` or `.bytes().nth()` see chapter in The Book " ), message = "the type `{T}` cannot be indexed by `{Self}`", label = "slice indices are of type `usize` or ranges of `usize`" )] pub unsafe trait SliceIndex: private_slice_index::Sealed { /// The output type returned by methods. #[stable(feature = "slice_get_slice", since = "1.28.0")] type Output: ?Sized; /// Returns a shared reference to the output at this location, if in /// bounds. #[unstable(feature = "slice_index_methods", issue = "none")] fn get(self, slice: &T) -> Option<&Self::Output>; /// Returns a mutable reference to the output at this location, if in /// bounds. #[unstable(feature = "slice_index_methods", issue = "none")] fn get_mut(self, slice: &mut T) -> Option<&mut Self::Output>; /// Returns a shared reference to the output at this location, without /// performing any bounds checking. /// Calling this method with an out-of-bounds index or a dangling `slice` pointer /// is *[undefined behavior]* even if the resulting reference is not used. /// /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html #[unstable(feature = "slice_index_methods", issue = "none")] unsafe fn get_unchecked(self, slice: *const T) -> *const Self::Output; /// Returns a mutable reference to the output at this location, without /// performing any bounds checking. /// Calling this method with an out-of-bounds index or a dangling `slice` pointer /// is *[undefined behavior]* even if the resulting reference is not used. /// /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html #[unstable(feature = "slice_index_methods", issue = "none")] unsafe fn get_unchecked_mut(self, slice: *mut T) -> *mut Self::Output; /// Returns a shared reference to the output at this location, panicking /// if out of bounds. #[unstable(feature = "slice_index_methods", issue = "none")] #[track_caller] fn index(self, slice: &T) -> &Self::Output; /// Returns a mutable reference to the output at this location, panicking /// if out of bounds. #[unstable(feature = "slice_index_methods", issue = "none")] #[track_caller] fn index_mut(self, slice: &mut T) -> &mut Self::Output; } #[stable(feature = "slice_get_slice_impls", since = "1.15.0")] unsafe impl SliceIndex<[T]> for usize { type Output = T; #[inline] fn get(self, slice: &[T]) -> Option<&T> { // SAFETY: `self` is checked to be in bounds. if self < slice.len() { unsafe { Some(&*self.get_unchecked(slice)) } } else { None } } #[inline] fn get_mut(self, slice: &mut [T]) -> Option<&mut T> { // SAFETY: `self` is checked to be in bounds. if self < slice.len() { unsafe { Some(&mut *self.get_unchecked_mut(slice)) } } else { None } } #[inline] unsafe fn get_unchecked(self, slice: *const [T]) -> *const T { // SAFETY: the caller guarantees that `slice` is not dangling, so it // cannot be longer than `isize::MAX`. They also guarantee that // `self` is in bounds of `slice` so `self` cannot overflow an `isize`, // so the call to `add` is safe. unsafe { slice.as_ptr().add(self) } } #[inline] unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut T { // SAFETY: see comments for `get_unchecked` above. unsafe { slice.as_mut_ptr().add(self) } } #[inline] fn index(self, slice: &[T]) -> &T { // N.B., use intrinsic indexing &(*slice)[self] } #[inline] fn index_mut(self, slice: &mut [T]) -> &mut T { // N.B., use intrinsic indexing &mut (*slice)[self] } } #[stable(feature = "slice_get_slice_impls", since = "1.15.0")] unsafe impl SliceIndex<[T]> for ops::Range { type Output = [T]; #[inline] fn get(self, slice: &[T]) -> Option<&[T]> { if self.start > self.end || self.end > slice.len() { None } else { // SAFETY: `self` is checked to be valid and in bounds above. unsafe { Some(&*self.get_unchecked(slice)) } } } #[inline] fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> { if self.start > self.end || self.end > slice.len() { None } else { // SAFETY: `self` is checked to be valid and in bounds above. unsafe { Some(&mut *self.get_unchecked_mut(slice)) } } } #[inline] unsafe fn get_unchecked(self, slice: *const [T]) -> *const [T] { // SAFETY: the caller guarantees that `slice` is not dangling, so it // cannot be longer than `isize::MAX`. They also guarantee that // `self` is in bounds of `slice` so `self` cannot overflow an `isize`, // so the call to `add` is safe. unsafe { ptr::slice_from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start) } } #[inline] unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] { // SAFETY: see comments for `get_unchecked` above. unsafe { ptr::slice_from_raw_parts_mut(slice.as_mut_ptr().add(self.start), self.end - self.start) } } #[inline] fn index(self, slice: &[T]) -> &[T] { if self.start > self.end { slice_index_order_fail(self.start, self.end); } else if self.end > slice.len() { slice_end_index_len_fail(self.end, slice.len()); } // SAFETY: `self` is checked to be valid and in bounds above. unsafe { &*self.get_unchecked(slice) } } #[inline] fn index_mut(self, slice: &mut [T]) -> &mut [T] { if self.start > self.end { slice_index_order_fail(self.start, self.end); } else if self.end > slice.len() { slice_end_index_len_fail(self.end, slice.len()); } // SAFETY: `self` is checked to be valid and in bounds above. unsafe { &mut *self.get_unchecked_mut(slice) } } } #[stable(feature = "slice_get_slice_impls", since = "1.15.0")] unsafe impl SliceIndex<[T]> for ops::RangeTo { type Output = [T]; #[inline] fn get(self, slice: &[T]) -> Option<&[T]> { (0..self.end).get(slice) } #[inline] fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> { (0..self.end).get_mut(slice) } #[inline] unsafe fn get_unchecked(self, slice: *const [T]) -> *const [T] { // SAFETY: the caller has to uphold the safety contract for `get_unchecked`. unsafe { (0..self.end).get_unchecked(slice) } } #[inline] unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] { // SAFETY: the caller has to uphold the safety contract for `get_unchecked_mut`. unsafe { (0..self.end).get_unchecked_mut(slice) } } #[inline] fn index(self, slice: &[T]) -> &[T] { (0..self.end).index(slice) } #[inline] fn index_mut(self, slice: &mut [T]) -> &mut [T] { (0..self.end).index_mut(slice) } } #[stable(feature = "slice_get_slice_impls", since = "1.15.0")] unsafe impl SliceIndex<[T]> for ops::RangeFrom { type Output = [T]; #[inline] fn get(self, slice: &[T]) -> Option<&[T]> { (self.start..slice.len()).get(slice) } #[inline] fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> { (self.start..slice.len()).get_mut(slice) } #[inline] unsafe fn get_unchecked(self, slice: *const [T]) -> *const [T] { // SAFETY: the caller has to uphold the safety contract for `get_unchecked`. unsafe { (self.start..slice.len()).get_unchecked(slice) } } #[inline] unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] { // SAFETY: the caller has to uphold the safety contract for `get_unchecked_mut`. unsafe { (self.start..slice.len()).get_unchecked_mut(slice) } } #[inline] fn index(self, slice: &[T]) -> &[T] { if self.start > slice.len() { slice_start_index_len_fail(self.start, slice.len()); } // SAFETY: `self` is checked to be valid and in bounds above. unsafe { &*self.get_unchecked(slice) } } #[inline] fn index_mut(self, slice: &mut [T]) -> &mut [T] { if self.start > slice.len() { slice_start_index_len_fail(self.start, slice.len()); } // SAFETY: `self` is checked to be valid and in bounds above. unsafe { &mut *self.get_unchecked_mut(slice) } } } #[stable(feature = "slice_get_slice_impls", since = "1.15.0")] unsafe impl SliceIndex<[T]> for ops::RangeFull { type Output = [T]; #[inline] fn get(self, slice: &[T]) -> Option<&[T]> { Some(slice) } #[inline] fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> { Some(slice) } #[inline] unsafe fn get_unchecked(self, slice: *const [T]) -> *const [T] { slice } #[inline] unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] { slice } #[inline] fn index(self, slice: &[T]) -> &[T] { slice } #[inline] fn index_mut(self, slice: &mut [T]) -> &mut [T] { slice } } #[stable(feature = "inclusive_range", since = "1.26.0")] unsafe impl SliceIndex<[T]> for ops::RangeInclusive { type Output = [T]; #[inline] fn get(self, slice: &[T]) -> Option<&[T]> { if *self.end() == usize::MAX { None } else { (*self.start()..self.end() + 1).get(slice) } } #[inline] fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> { if *self.end() == usize::MAX { None } else { (*self.start()..self.end() + 1).get_mut(slice) } } #[inline] unsafe fn get_unchecked(self, slice: *const [T]) -> *const [T] { // SAFETY: the caller has to uphold the safety contract for `get_unchecked`. unsafe { (*self.start()..self.end() + 1).get_unchecked(slice) } } #[inline] unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] { // SAFETY: the caller has to uphold the safety contract for `get_unchecked_mut`. unsafe { (*self.start()..self.end() + 1).get_unchecked_mut(slice) } } #[inline] fn index(self, slice: &[T]) -> &[T] { if *self.end() == usize::MAX { slice_end_index_overflow_fail(); } (*self.start()..self.end() + 1).index(slice) } #[inline] fn index_mut(self, slice: &mut [T]) -> &mut [T] { if *self.end() == usize::MAX { slice_end_index_overflow_fail(); } (*self.start()..self.end() + 1).index_mut(slice) } } #[stable(feature = "inclusive_range", since = "1.26.0")] unsafe impl SliceIndex<[T]> for ops::RangeToInclusive { type Output = [T]; #[inline] fn get(self, slice: &[T]) -> Option<&[T]> { (0..=self.end).get(slice) } #[inline] fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> { (0..=self.end).get_mut(slice) } #[inline] unsafe fn get_unchecked(self, slice: *const [T]) -> *const [T] { // SAFETY: the caller has to uphold the safety contract for `get_unchecked`. unsafe { (0..=self.end).get_unchecked(slice) } } #[inline] unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] { // SAFETY: the caller has to uphold the safety contract for `get_unchecked_mut`. unsafe { (0..=self.end).get_unchecked_mut(slice) } } #[inline] fn index(self, slice: &[T]) -> &[T] { (0..=self.end).index(slice) } #[inline] fn index_mut(self, slice: &mut [T]) -> &mut [T] { (0..=self.end).index_mut(slice) } } //////////////////////////////////////////////////////////////////////////////// // Common traits //////////////////////////////////////////////////////////////////////////////// #[stable(feature = "rust1", since = "1.0.0")] impl Default for &[T] { /// Creates an empty slice. fn default() -> Self { &[] } } #[stable(feature = "mut_slice_default", since = "1.5.0")] impl Default for &mut [T] { /// Creates a mutable empty slice. fn default() -> Self { &mut [] } } // // Iterators // #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> IntoIterator for &'a [T] { type Item = &'a T; type IntoIter = Iter<'a, T>; fn into_iter(self) -> Iter<'a, T> { self.iter() } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> IntoIterator for &'a mut [T] { type Item = &'a mut T; type IntoIter = IterMut<'a, T>; fn into_iter(self) -> IterMut<'a, T> { self.iter_mut() } } // Macro helper functions #[inline(always)] fn size_from_ptr(_: *const T) -> usize { mem::size_of::() } // Inlining is_empty and len makes a huge performance difference macro_rules! is_empty { // The way we encode the length of a ZST iterator, this works both for ZST // and non-ZST. ($self: ident) => { $self.ptr.as_ptr() as *const T == $self.end }; } // To get rid of some bounds checks (see `position`), we compute the length in a somewhat // unexpected way. (Tested by `codegen/slice-position-bounds-check`.) macro_rules! len { ($self: ident) => {{ #![allow(unused_unsafe)] // we're sometimes used within an unsafe block let start = $self.ptr; let size = size_from_ptr(start.as_ptr()); if size == 0 { // This _cannot_ use `unchecked_sub` because we depend on wrapping // to represent the length of long ZST slice iterators. ($self.end as usize).wrapping_sub(start.as_ptr() as usize) } else { // We know that `start <= end`, so can do better than `offset_from`, // which needs to deal in signed. By setting appropriate flags here // we can tell LLVM this, which helps it remove bounds checks. // SAFETY: By the type invariant, `start <= end` let diff = unsafe { unchecked_sub($self.end as usize, start.as_ptr() as usize) }; // By also telling LLVM that the pointers are apart by an exact // multiple of the type size, it can optimize `len() == 0` down to // `start == end` instead of `(end - start) < size`. // SAFETY: By the type invariant, the pointers are aligned so the // distance between them must be a multiple of pointee size unsafe { exact_div(diff, size) } } }}; } // The shared definition of the `Iter` and `IterMut` iterators macro_rules! iterator { ( struct $name:ident -> $ptr:ty, $elem:ty, $raw_mut:tt, {$( $mut_:tt )?}, {$($extra:tt)*} ) => { // Returns the first element and moves the start of the iterator forwards by 1. // Greatly improves performance compared to an inlined function. The iterator // must not be empty. macro_rules! next_unchecked { ($self: ident) => {& $( $mut_ )? *$self.post_inc_start(1)} } // Returns the last element and moves the end of the iterator backwards by 1. // Greatly improves performance compared to an inlined function. The iterator // must not be empty. macro_rules! next_back_unchecked { ($self: ident) => {& $( $mut_ )? *$self.pre_dec_end(1)} } // Shrinks the iterator when T is a ZST, by moving the end of the iterator // backwards by `n`. `n` must not exceed `self.len()`. macro_rules! zst_shrink { ($self: ident, $n: ident) => { $self.end = ($self.end as * $raw_mut u8).wrapping_offset(-$n) as * $raw_mut T; } } impl<'a, T> $name<'a, T> { // Helper function for creating a slice from the iterator. #[inline(always)] fn make_slice(&self) -> &'a [T] { // SAFETY: the iterator was created from a slice with pointer // `self.ptr` and length `len!(self)`. This guarantees that all // the prerequisites for `from_raw_parts` are fulfilled. unsafe { from_raw_parts(self.ptr.as_ptr(), len!(self)) } } // Helper function for moving the start of the iterator forwards by `offset` elements, // returning the old start. // Unsafe because the offset must not exceed `self.len()`. #[inline(always)] unsafe fn post_inc_start(&mut self, offset: isize) -> * $raw_mut T { if mem::size_of::() == 0 { zst_shrink!(self, offset); self.ptr.as_ptr() } else { let old = self.ptr.as_ptr(); // SAFETY: the caller guarantees that `offset` doesn't exceed `self.len()`, // so this new pointer is inside `self` and thus guaranteed to be non-null. self.ptr = unsafe { NonNull::new_unchecked(self.ptr.as_ptr().offset(offset)) }; old } } // Helper function for moving the end of the iterator backwards by `offset` elements, // returning the new end. // Unsafe because the offset must not exceed `self.len()`. #[inline(always)] unsafe fn pre_dec_end(&mut self, offset: isize) -> * $raw_mut T { if mem::size_of::() == 0 { zst_shrink!(self, offset); self.ptr.as_ptr() } else { // SAFETY: the caller guarantees that `offset` doesn't exceed `self.len()`, // which is guaranteed to not overflow an `isize`. Also, the resulting pointer // is in bounds of `slice`, which fulfills the other requirements for `offset`. self.end = unsafe { self.end.offset(-offset) }; self.end } } } #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for $name<'_, T> { #[inline(always)] fn len(&self) -> usize { len!(self) } #[inline(always)] fn is_empty(&self) -> bool { is_empty!(self) } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Iterator for $name<'a, T> { type Item = $elem; #[inline] fn next(&mut self) -> Option<$elem> { // could be implemented with slices, but this avoids bounds checks // SAFETY: `assume` calls are safe since a slice's start pointer // must be non-null, and slices over non-ZSTs must also have a // non-null end pointer. The call to `next_unchecked!` is safe // since we check if the iterator is empty first. unsafe { assume(!self.ptr.as_ptr().is_null()); if mem::size_of::() != 0 { assume(!self.end.is_null()); } if is_empty!(self) { None } else { Some(next_unchecked!(self)) } } } #[inline] fn size_hint(&self) -> (usize, Option) { let exact = len!(self); (exact, Some(exact)) } #[inline] fn count(self) -> usize { len!(self) } #[inline] fn nth(&mut self, n: usize) -> Option<$elem> { if n >= len!(self) { // This iterator is now empty. if mem::size_of::() == 0 { // We have to do it this way as `ptr` may never be 0, but `end` // could be (due to wrapping). self.end = self.ptr.as_ptr(); } else { // SAFETY: end can't be 0 if T isn't ZST because ptr isn't 0 and end >= ptr unsafe { self.ptr = NonNull::new_unchecked(self.end as *mut T); } } return None; } // SAFETY: We are in bounds. `post_inc_start` does the right thing even for ZSTs. unsafe { self.post_inc_start(n as isize); Some(next_unchecked!(self)) } } #[inline] fn last(mut self) -> Option<$elem> { self.next_back() } // We override the default implementation, which uses `try_fold`, // because this simple implementation generates less LLVM IR and is // faster to compile. #[inline] fn for_each(mut self, mut f: F) where Self: Sized, F: FnMut(Self::Item), { while let Some(x) = self.next() { f(x); } } // We override the default implementation, which uses `try_fold`, // because this simple implementation generates less LLVM IR and is // faster to compile. #[inline] fn all(&mut self, mut f: F) -> bool where Self: Sized, F: FnMut(Self::Item) -> bool, { while let Some(x) = self.next() { if !f(x) { return false; } } true } // We override the default implementation, which uses `try_fold`, // because this simple implementation generates less LLVM IR and is // faster to compile. #[inline] fn any(&mut self, mut f: F) -> bool where Self: Sized, F: FnMut(Self::Item) -> bool, { while let Some(x) = self.next() { if f(x) { return true; } } false } // We override the default implementation, which uses `try_fold`, // because this simple implementation generates less LLVM IR and is // faster to compile. #[inline] fn find

(&mut self, mut predicate: P) -> Option where Self: Sized, P: FnMut(&Self::Item) -> bool, { while let Some(x) = self.next() { if predicate(&x) { return Some(x); } } None } // We override the default implementation, which uses `try_fold`, // because this simple implementation generates less LLVM IR and is // faster to compile. #[inline] fn find_map(&mut self, mut f: F) -> Option where Self: Sized, F: FnMut(Self::Item) -> Option, { while let Some(x) = self.next() { if let Some(y) = f(x) { return Some(y); } } None } // We override the default implementation, which uses `try_fold`, // because this simple implementation generates less LLVM IR and is // faster to compile. Also, the `assume` avoids a bounds check. #[inline] #[rustc_inherit_overflow_checks] fn position

(&mut self, mut predicate: P) -> Option where Self: Sized, P: FnMut(Self::Item) -> bool, { let n = len!(self); let mut i = 0; while let Some(x) = self.next() { if predicate(x) { // SAFETY: we are guaranteed to be in bounds by the loop invariant: // when `i >= n`, `self.next()` returns `None` and the loop breaks. unsafe { assume(i < n) }; return Some(i); } i += 1; } None } // We override the default implementation, which uses `try_fold`, // because this simple implementation generates less LLVM IR and is // faster to compile. Also, the `assume` avoids a bounds check. #[inline] fn rposition

(&mut self, mut predicate: P) -> Option where P: FnMut(Self::Item) -> bool, Self: Sized + ExactSizeIterator + DoubleEndedIterator { let n = len!(self); let mut i = n; while let Some(x) = self.next_back() { i -= 1; if predicate(x) { // SAFETY: `i` must be lower than `n` since it starts at `n` // and is only decreasing. unsafe { assume(i < n) }; return Some(i); } } None } #[doc(hidden)] unsafe fn get_unchecked(&mut self, idx: usize) -> Self::Item { // SAFETY: the caller must guarantee that `i` is in bounds of // the underlying slice, so `i` cannot overflow an `isize`, and // the returned references is guaranteed to refer to an element // of the slice and thus guaranteed to be valid. // // Also note that the caller also guarantees that we're never // called with the same index again, and that no other methods // that will access this subslice are called, so it is valid // for the returned reference to be mutable in the case of // `IterMut` unsafe { & $( $mut_ )? * self.ptr.as_ptr().add(idx) } } $($extra)* } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> DoubleEndedIterator for $name<'a, T> { #[inline] fn next_back(&mut self) -> Option<$elem> { // could be implemented with slices, but this avoids bounds checks // SAFETY: `assume` calls are safe since a slice's start pointer must be non-null, // and slices over non-ZSTs must also have a non-null end pointer. // The call to `next_back_unchecked!` is safe since we check if the iterator is // empty first. unsafe { assume(!self.ptr.as_ptr().is_null()); if mem::size_of::() != 0 { assume(!self.end.is_null()); } if is_empty!(self) { None } else { Some(next_back_unchecked!(self)) } } } #[inline] fn nth_back(&mut self, n: usize) -> Option<$elem> { if n >= len!(self) { // This iterator is now empty. self.end = self.ptr.as_ptr(); return None; } // SAFETY: We are in bounds. `pre_dec_end` does the right thing even for ZSTs. unsafe { self.pre_dec_end(n as isize); Some(next_back_unchecked!(self)) } } } #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for $name<'_, T> {} #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl TrustedLen for $name<'_, T> {} } } /// Immutable slice iterator /// /// This struct is created by the [`iter`] method on [slices]. /// /// # Examples /// /// Basic usage: /// /// ``` /// // First, we declare a type which has `iter` method to get the `Iter` struct (&[usize here]): /// let slice = &[1, 2, 3]; /// /// // Then, we iterate over it: /// for element in slice.iter() { /// println!("{}", element); /// } /// ``` /// /// [`iter`]: ../../std/primitive.slice.html#method.iter /// [slices]: ../../std/primitive.slice.html #[stable(feature = "rust1", since = "1.0.0")] pub struct Iter<'a, T: 'a> { ptr: NonNull, end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that // ptr == end is a quick test for the Iterator being empty, that works // for both ZST and non-ZST. _marker: marker::PhantomData<&'a T>, } #[stable(feature = "core_impl_debug", since = "1.9.0")] impl fmt::Debug for Iter<'_, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("Iter").field(&self.as_slice()).finish() } } #[stable(feature = "rust1", since = "1.0.0")] unsafe impl Sync for Iter<'_, T> {} #[stable(feature = "rust1", since = "1.0.0")] unsafe impl Send for Iter<'_, T> {} impl<'a, T> Iter<'a, T> { /// Views the underlying data as a subslice of the original data. /// /// This has the same lifetime as the original slice, and so the /// iterator can continue to be used while this exists. /// /// # Examples /// /// Basic usage: /// /// ``` /// // First, we declare a type which has the `iter` method to get the `Iter` /// // struct (&[usize here]): /// let slice = &[1, 2, 3]; /// /// // Then, we get the iterator: /// let mut iter = slice.iter(); /// // So if we print what `as_slice` method returns here, we have "[1, 2, 3]": /// println!("{:?}", iter.as_slice()); /// /// // Next, we move to the second element of the slice: /// iter.next(); /// // Now `as_slice` returns "[2, 3]": /// println!("{:?}", iter.as_slice()); /// ``` #[stable(feature = "iter_to_slice", since = "1.4.0")] pub fn as_slice(&self) -> &'a [T] { self.make_slice() } } iterator! {struct Iter -> *const T, &'a T, const, {/* no mut */}, { fn is_sorted_by(self, mut compare: F) -> bool where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> Option, { self.as_slice().windows(2).all(|w| { compare(&&w[0], &&w[1]).map(|o| o != Ordering::Greater).unwrap_or(false) }) } }} #[stable(feature = "rust1", since = "1.0.0")] impl Clone for Iter<'_, T> { fn clone(&self) -> Self { Iter { ptr: self.ptr, end: self.end, _marker: self._marker } } } #[stable(feature = "slice_iter_as_ref", since = "1.13.0")] impl AsRef<[T]> for Iter<'_, T> { fn as_ref(&self) -> &[T] { self.as_slice() } } /// Mutable slice iterator. /// /// This struct is created by the [`iter_mut`] method on [slices]. /// /// # Examples /// /// Basic usage: /// /// ``` /// // First, we declare a type which has `iter_mut` method to get the `IterMut` /// // struct (&[usize here]): /// let mut slice = &mut [1, 2, 3]; /// /// // Then, we iterate over it and increment each element value: /// for element in slice.iter_mut() { /// *element += 1; /// } /// /// // We now have "[2, 3, 4]": /// println!("{:?}", slice); /// ``` /// /// [`iter_mut`]: ../../std/primitive.slice.html#method.iter_mut /// [slices]: ../../std/primitive.slice.html #[stable(feature = "rust1", since = "1.0.0")] pub struct IterMut<'a, T: 'a> { ptr: NonNull, end: *mut T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that // ptr == end is a quick test for the Iterator being empty, that works // for both ZST and non-ZST. _marker: marker::PhantomData<&'a mut T>, } #[stable(feature = "core_impl_debug", since = "1.9.0")] impl fmt::Debug for IterMut<'_, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("IterMut").field(&self.make_slice()).finish() } } #[stable(feature = "rust1", since = "1.0.0")] unsafe impl Sync for IterMut<'_, T> {} #[stable(feature = "rust1", since = "1.0.0")] unsafe impl Send for IterMut<'_, T> {} impl<'a, T> IterMut<'a, T> { /// Views the underlying data as a subslice of the original data. /// /// To avoid creating `&mut` references that alias, this is forced /// to consume the iterator. /// /// # Examples /// /// Basic usage: /// /// ``` /// // First, we declare a type which has `iter_mut` method to get the `IterMut` /// // struct (&[usize here]): /// let mut slice = &mut [1, 2, 3]; /// /// { /// // Then, we get the iterator: /// let mut iter = slice.iter_mut(); /// // We move to next element: /// iter.next(); /// // So if we print what `into_slice` method returns here, we have "[2, 3]": /// println!("{:?}", iter.into_slice()); /// } /// /// // Now let's modify a value of the slice: /// { /// // First we get back the iterator: /// let mut iter = slice.iter_mut(); /// // We change the value of the first element of the slice returned by the `next` method: /// *iter.next().unwrap() += 1; /// } /// // Now slice is "[2, 2, 3]": /// println!("{:?}", slice); /// ``` #[stable(feature = "iter_to_slice", since = "1.4.0")] pub fn into_slice(self) -> &'a mut [T] { // SAFETY: the iterator was created from a mutable slice with pointer // `self.ptr` and length `len!(self)`. This guarantees that all the prerequisites // for `from_raw_parts_mut` are fulfilled. unsafe { from_raw_parts_mut(self.ptr.as_ptr(), len!(self)) } } /// Views the underlying data as a subslice of the original data. /// /// To avoid creating `&mut [T]` references that alias, the returned slice /// borrows its lifetime from the iterator the method is applied on. /// /// # Examples /// /// Basic usage: /// /// ``` /// # #![feature(slice_iter_mut_as_slice)] /// let mut slice: &mut [usize] = &mut [1, 2, 3]; /// /// // First, we get the iterator: /// let mut iter = slice.iter_mut(); /// // So if we check what the `as_slice` method returns here, we have "[1, 2, 3]": /// assert_eq!(iter.as_slice(), &[1, 2, 3]); /// /// // Next, we move to the second element of the slice: /// iter.next(); /// // Now `as_slice` returns "[2, 3]": /// assert_eq!(iter.as_slice(), &[2, 3]); /// ``` #[unstable(feature = "slice_iter_mut_as_slice", reason = "recently added", issue = "58957")] pub fn as_slice(&self) -> &[T] { self.make_slice() } } iterator! {struct IterMut -> *mut T, &'a mut T, mut, {mut}, {}} /// An internal abstraction over the splitting iterators, so that /// splitn, splitn_mut etc can be implemented once. #[doc(hidden)] trait SplitIter: DoubleEndedIterator { /// Marks the underlying iterator as complete, extracting the remaining /// portion of the slice. fn finish(&mut self) -> Option; } /// An iterator over subslices separated by elements that match a predicate /// function. /// /// This struct is created by the [`split`] method on [slices]. /// /// [`split`]: ../../std/primitive.slice.html#method.split /// [slices]: ../../std/primitive.slice.html #[stable(feature = "rust1", since = "1.0.0")] pub struct Split<'a, T: 'a, P> where P: FnMut(&T) -> bool, { v: &'a [T], pred: P, finished: bool, } #[stable(feature = "core_impl_debug", since = "1.9.0")] impl fmt::Debug for Split<'_, T, P> where P: FnMut(&T) -> bool, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Split").field("v", &self.v).field("finished", &self.finished).finish() } } // FIXME(#26925) Remove in favor of `#[derive(Clone)]` #[stable(feature = "rust1", since = "1.0.0")] impl Clone for Split<'_, T, P> where P: Clone + FnMut(&T) -> bool, { fn clone(&self) -> Self { Split { v: self.v, pred: self.pred.clone(), finished: self.finished } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T, P> Iterator for Split<'a, T, P> where P: FnMut(&T) -> bool, { type Item = &'a [T]; #[inline] fn next(&mut self) -> Option<&'a [T]> { if self.finished { return None; } match self.v.iter().position(|x| (self.pred)(x)) { None => self.finish(), Some(idx) => { let ret = Some(&self.v[..idx]); self.v = &self.v[idx + 1..]; ret } } } #[inline] fn size_hint(&self) -> (usize, Option) { if self.finished { (0, Some(0)) } else { (1, Some(self.v.len() + 1)) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P> where P: FnMut(&T) -> bool, { #[inline] fn next_back(&mut self) -> Option<&'a [T]> { if self.finished { return None; } match self.v.iter().rposition(|x| (self.pred)(x)) { None => self.finish(), Some(idx) => { let ret = Some(&self.v[idx + 1..]); self.v = &self.v[..idx]; ret } } } } impl<'a, T, P> SplitIter for Split<'a, T, P> where P: FnMut(&T) -> bool, { #[inline] fn finish(&mut self) -> Option<&'a [T]> { if self.finished { None } else { self.finished = true; Some(self.v) } } } #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for Split<'_, T, P> where P: FnMut(&T) -> bool {} /// An iterator over subslices separated by elements that match a predicate /// function. Unlike `Split`, it contains the matched part as a terminator /// of the subslice. /// /// This struct is created by the [`split_inclusive`] method on [slices]. /// /// [`split_inclusive`]: ../../std/primitive.slice.html#method.split_inclusive /// [slices]: ../../std/primitive.slice.html #[unstable(feature = "split_inclusive", issue = "72360")] pub struct SplitInclusive<'a, T: 'a, P> where P: FnMut(&T) -> bool, { v: &'a [T], pred: P, finished: bool, } #[unstable(feature = "split_inclusive", issue = "72360")] impl fmt::Debug for SplitInclusive<'_, T, P> where P: FnMut(&T) -> bool, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("SplitInclusive") .field("v", &self.v) .field("finished", &self.finished) .finish() } } // FIXME(#26925) Remove in favor of `#[derive(Clone)]` #[unstable(feature = "split_inclusive", issue = "72360")] impl Clone for SplitInclusive<'_, T, P> where P: Clone + FnMut(&T) -> bool, { fn clone(&self) -> Self { SplitInclusive { v: self.v, pred: self.pred.clone(), finished: self.finished } } } #[unstable(feature = "split_inclusive", issue = "72360")] impl<'a, T, P> Iterator for SplitInclusive<'a, T, P> where P: FnMut(&T) -> bool, { type Item = &'a [T]; #[inline] fn next(&mut self) -> Option<&'a [T]> { if self.finished { return None; } let idx = self.v.iter().position(|x| (self.pred)(x)).map(|idx| idx + 1).unwrap_or(self.v.len()); if idx == self.v.len() { self.finished = true; } let ret = Some(&self.v[..idx]); self.v = &self.v[idx..]; ret } #[inline] fn size_hint(&self) -> (usize, Option) { if self.finished { (0, Some(0)) } else { (1, Some(self.v.len() + 1)) } } } #[unstable(feature = "split_inclusive", issue = "72360")] impl<'a, T, P> DoubleEndedIterator for SplitInclusive<'a, T, P> where P: FnMut(&T) -> bool, { #[inline] fn next_back(&mut self) -> Option<&'a [T]> { if self.finished { return None; } // The last index of self.v is already checked and found to match // by the last iteration, so we start searching a new match // one index to the left. let remainder = if self.v.is_empty() { &[] } else { &self.v[..(self.v.len() - 1)] }; let idx = remainder.iter().rposition(|x| (self.pred)(x)).map(|idx| idx + 1).unwrap_or(0); if idx == 0 { self.finished = true; } let ret = Some(&self.v[idx..]); self.v = &self.v[..idx]; ret } } #[unstable(feature = "split_inclusive", issue = "72360")] impl FusedIterator for SplitInclusive<'_, T, P> where P: FnMut(&T) -> bool {} /// An iterator over the mutable subslices of the vector which are separated /// by elements that match `pred`. /// /// This struct is created by the [`split_mut`] method on [slices]. /// /// [`split_mut`]: ../../std/primitive.slice.html#method.split_mut /// [slices]: ../../std/primitive.slice.html #[stable(feature = "rust1", since = "1.0.0")] pub struct SplitMut<'a, T: 'a, P> where P: FnMut(&T) -> bool, { v: &'a mut [T], pred: P, finished: bool, } #[stable(feature = "core_impl_debug", since = "1.9.0")] impl fmt::Debug for SplitMut<'_, T, P> where P: FnMut(&T) -> bool, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("SplitMut").field("v", &self.v).field("finished", &self.finished).finish() } } impl<'a, T, P> SplitIter for SplitMut<'a, T, P> where P: FnMut(&T) -> bool, { #[inline] fn finish(&mut self) -> Option<&'a mut [T]> { if self.finished { None } else { self.finished = true; Some(mem::replace(&mut self.v, &mut [])) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T, P> Iterator for SplitMut<'a, T, P> where P: FnMut(&T) -> bool, { type Item = &'a mut [T]; #[inline] fn next(&mut self) -> Option<&'a mut [T]> { if self.finished { return None; } let idx_opt = { // work around borrowck limitations let pred = &mut self.pred; self.v.iter().position(|x| (*pred)(x)) }; match idx_opt { None => self.finish(), Some(idx) => { let tmp = mem::replace(&mut self.v, &mut []); let (head, tail) = tmp.split_at_mut(idx); self.v = &mut tail[1..]; Some(head) } } } #[inline] fn size_hint(&self) -> (usize, Option) { if self.finished { (0, Some(0)) } else { // if the predicate doesn't match anything, we yield one slice // if it matches every element, we yield len+1 empty slices. (1, Some(self.v.len() + 1)) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T, P> DoubleEndedIterator for SplitMut<'a, T, P> where P: FnMut(&T) -> bool, { #[inline] fn next_back(&mut self) -> Option<&'a mut [T]> { if self.finished { return None; } let idx_opt = { // work around borrowck limitations let pred = &mut self.pred; self.v.iter().rposition(|x| (*pred)(x)) }; match idx_opt { None => self.finish(), Some(idx) => { let tmp = mem::replace(&mut self.v, &mut []); let (head, tail) = tmp.split_at_mut(idx); self.v = head; Some(&mut tail[1..]) } } } } #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for SplitMut<'_, T, P> where P: FnMut(&T) -> bool {} /// An iterator over the mutable subslices of the vector which are separated /// by elements that match `pred`. Unlike `SplitMut`, it contains the matched /// parts in the ends of the subslices. /// /// This struct is created by the [`split_inclusive_mut`] method on [slices]. /// /// [`split_inclusive_mut`]: ../../std/primitive.slice.html#method.split_inclusive_mut /// [slices]: ../../std/primitive.slice.html #[unstable(feature = "split_inclusive", issue = "72360")] pub struct SplitInclusiveMut<'a, T: 'a, P> where P: FnMut(&T) -> bool, { v: &'a mut [T], pred: P, finished: bool, } #[unstable(feature = "split_inclusive", issue = "72360")] impl fmt::Debug for SplitInclusiveMut<'_, T, P> where P: FnMut(&T) -> bool, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("SplitInclusiveMut") .field("v", &self.v) .field("finished", &self.finished) .finish() } } #[unstable(feature = "split_inclusive", issue = "72360")] impl<'a, T, P> Iterator for SplitInclusiveMut<'a, T, P> where P: FnMut(&T) -> bool, { type Item = &'a mut [T]; #[inline] fn next(&mut self) -> Option<&'a mut [T]> { if self.finished { return None; } let idx_opt = { // work around borrowck limitations let pred = &mut self.pred; self.v.iter().position(|x| (*pred)(x)) }; let idx = idx_opt.map(|idx| idx + 1).unwrap_or(self.v.len()); if idx == self.v.len() { self.finished = true; } let tmp = mem::replace(&mut self.v, &mut []); let (head, tail) = tmp.split_at_mut(idx); self.v = tail; Some(head) } #[inline] fn size_hint(&self) -> (usize, Option) { if self.finished { (0, Some(0)) } else { // if the predicate doesn't match anything, we yield one slice // if it matches every element, we yield len+1 empty slices. (1, Some(self.v.len() + 1)) } } } #[unstable(feature = "split_inclusive", issue = "72360")] impl<'a, T, P> DoubleEndedIterator for SplitInclusiveMut<'a, T, P> where P: FnMut(&T) -> bool, { #[inline] fn next_back(&mut self) -> Option<&'a mut [T]> { if self.finished { return None; } let idx_opt = if self.v.is_empty() { None } else { // work around borrowck limitations let pred = &mut self.pred; // The last index of self.v is already checked and found to match // by the last iteration, so we start searching a new match // one index to the left. let remainder = &self.v[..(self.v.len() - 1)]; remainder.iter().rposition(|x| (*pred)(x)) }; let idx = idx_opt.map(|idx| idx + 1).unwrap_or(0); if idx == 0 { self.finished = true; } let tmp = mem::replace(&mut self.v, &mut []); let (head, tail) = tmp.split_at_mut(idx); self.v = head; Some(tail) } } #[unstable(feature = "split_inclusive", issue = "72360")] impl FusedIterator for SplitInclusiveMut<'_, T, P> where P: FnMut(&T) -> bool {} /// An iterator over subslices separated by elements that match a predicate /// function, starting from the end of the slice. /// /// This struct is created by the [`rsplit`] method on [slices]. /// /// [`rsplit`]: ../../std/primitive.slice.html#method.rsplit /// [slices]: ../../std/primitive.slice.html #[stable(feature = "slice_rsplit", since = "1.27.0")] #[derive(Clone)] // Is this correct, or does it incorrectly require `T: Clone`? pub struct RSplit<'a, T: 'a, P> where P: FnMut(&T) -> bool, { inner: Split<'a, T, P>, } #[stable(feature = "slice_rsplit", since = "1.27.0")] impl fmt::Debug for RSplit<'_, T, P> where P: FnMut(&T) -> bool, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("RSplit") .field("v", &self.inner.v) .field("finished", &self.inner.finished) .finish() } } #[stable(feature = "slice_rsplit", since = "1.27.0")] impl<'a, T, P> Iterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool, { type Item = &'a [T]; #[inline] fn next(&mut self) -> Option<&'a [T]> { self.inner.next_back() } #[inline] fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } } #[stable(feature = "slice_rsplit", since = "1.27.0")] impl<'a, T, P> DoubleEndedIterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool, { #[inline] fn next_back(&mut self) -> Option<&'a [T]> { self.inner.next() } } #[stable(feature = "slice_rsplit", since = "1.27.0")] impl<'a, T, P> SplitIter for RSplit<'a, T, P> where P: FnMut(&T) -> bool, { #[inline] fn finish(&mut self) -> Option<&'a [T]> { self.inner.finish() } } #[stable(feature = "slice_rsplit", since = "1.27.0")] impl FusedIterator for RSplit<'_, T, P> where P: FnMut(&T) -> bool {} /// An iterator over the subslices of the vector which are separated /// by elements that match `pred`, starting from the end of the slice. /// /// This struct is created by the [`rsplit_mut`] method on [slices]. /// /// [`rsplit_mut`]: ../../std/primitive.slice.html#method.rsplit_mut /// [slices]: ../../std/primitive.slice.html #[stable(feature = "slice_rsplit", since = "1.27.0")] pub struct RSplitMut<'a, T: 'a, P> where P: FnMut(&T) -> bool, { inner: SplitMut<'a, T, P>, } #[stable(feature = "slice_rsplit", since = "1.27.0")] impl fmt::Debug for RSplitMut<'_, T, P> where P: FnMut(&T) -> bool, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("RSplitMut") .field("v", &self.inner.v) .field("finished", &self.inner.finished) .finish() } } #[stable(feature = "slice_rsplit", since = "1.27.0")] impl<'a, T, P> SplitIter for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool, { #[inline] fn finish(&mut self) -> Option<&'a mut [T]> { self.inner.finish() } } #[stable(feature = "slice_rsplit", since = "1.27.0")] impl<'a, T, P> Iterator for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool, { type Item = &'a mut [T]; #[inline] fn next(&mut self) -> Option<&'a mut [T]> { self.inner.next_back() } #[inline] fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } } #[stable(feature = "slice_rsplit", since = "1.27.0")] impl<'a, T, P> DoubleEndedIterator for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool, { #[inline] fn next_back(&mut self) -> Option<&'a mut [T]> { self.inner.next() } } #[stable(feature = "slice_rsplit", since = "1.27.0")] impl FusedIterator for RSplitMut<'_, T, P> where P: FnMut(&T) -> bool {} /// An private iterator over subslices separated by elements that /// match a predicate function, splitting at most a fixed number of /// times. #[derive(Debug)] struct GenericSplitN { iter: I, count: usize, } impl> Iterator for GenericSplitN { type Item = T; #[inline] fn next(&mut self) -> Option { match self.count { 0 => None, 1 => { self.count -= 1; self.iter.finish() } _ => { self.count -= 1; self.iter.next() } } } #[inline] fn size_hint(&self) -> (usize, Option) { let (lower, upper_opt) = self.iter.size_hint(); (lower, upper_opt.map(|upper| cmp::min(self.count, upper))) } } /// An iterator over subslices separated by elements that match a predicate /// function, limited to a given number of splits. /// /// This struct is created by the [`splitn`] method on [slices]. /// /// [`splitn`]: ../../std/primitive.slice.html#method.splitn /// [slices]: ../../std/primitive.slice.html #[stable(feature = "rust1", since = "1.0.0")] pub struct SplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool, { inner: GenericSplitN>, } #[stable(feature = "core_impl_debug", since = "1.9.0")] impl fmt::Debug for SplitN<'_, T, P> where P: FnMut(&T) -> bool, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("SplitN").field("inner", &self.inner).finish() } } /// An iterator over subslices separated by elements that match a /// predicate function, limited to a given number of splits, starting /// from the end of the slice. /// /// This struct is created by the [`rsplitn`] method on [slices]. /// /// [`rsplitn`]: ../../std/primitive.slice.html#method.rsplitn /// [slices]: ../../std/primitive.slice.html #[stable(feature = "rust1", since = "1.0.0")] pub struct RSplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool, { inner: GenericSplitN>, } #[stable(feature = "core_impl_debug", since = "1.9.0")] impl fmt::Debug for RSplitN<'_, T, P> where P: FnMut(&T) -> bool, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("RSplitN").field("inner", &self.inner).finish() } } /// An iterator over subslices separated by elements that match a predicate /// function, limited to a given number of splits. /// /// This struct is created by the [`splitn_mut`] method on [slices]. /// /// [`splitn_mut`]: ../../std/primitive.slice.html#method.splitn_mut /// [slices]: ../../std/primitive.slice.html #[stable(feature = "rust1", since = "1.0.0")] pub struct SplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool, { inner: GenericSplitN>, } #[stable(feature = "core_impl_debug", since = "1.9.0")] impl fmt::Debug for SplitNMut<'_, T, P> where P: FnMut(&T) -> bool, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("SplitNMut").field("inner", &self.inner).finish() } } /// An iterator over subslices separated by elements that match a /// predicate function, limited to a given number of splits, starting /// from the end of the slice. /// /// This struct is created by the [`rsplitn_mut`] method on [slices]. /// /// [`rsplitn_mut`]: ../../std/primitive.slice.html#method.rsplitn_mut /// [slices]: ../../std/primitive.slice.html #[stable(feature = "rust1", since = "1.0.0")] pub struct RSplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool, { inner: GenericSplitN>, } #[stable(feature = "core_impl_debug", since = "1.9.0")] impl fmt::Debug for RSplitNMut<'_, T, P> where P: FnMut(&T) -> bool, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("RSplitNMut").field("inner", &self.inner).finish() } } macro_rules! forward_iterator { ($name:ident: $elem:ident, $iter_of:ty) => { #[stable(feature = "rust1", since = "1.0.0")] impl<'a, $elem, P> Iterator for $name<'a, $elem, P> where P: FnMut(&T) -> bool, { type Item = $iter_of; #[inline] fn next(&mut self) -> Option<$iter_of> { self.inner.next() } #[inline] fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } } #[stable(feature = "fused", since = "1.26.0")] impl<'a, $elem, P> FusedIterator for $name<'a, $elem, P> where P: FnMut(&T) -> bool {} }; } forward_iterator! { SplitN: T, &'a [T] } forward_iterator! { RSplitN: T, &'a [T] } forward_iterator! { SplitNMut: T, &'a mut [T] } forward_iterator! { RSplitNMut: T, &'a mut [T] } /// An iterator over overlapping subslices of length `size`. /// /// This struct is created by the [`windows`] method on [slices]. /// /// [`windows`]: ../../std/primitive.slice.html#method.windows /// [slices]: ../../std/primitive.slice.html #[derive(Debug)] #[stable(feature = "rust1", since = "1.0.0")] pub struct Windows<'a, T: 'a> { v: &'a [T], size: usize, } // FIXME(#26925) Remove in favor of `#[derive(Clone)]` #[stable(feature = "rust1", since = "1.0.0")] impl Clone for Windows<'_, T> { fn clone(&self) -> Self { Windows { v: self.v, size: self.size } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Iterator for Windows<'a, T> { type Item = &'a [T]; #[inline] fn next(&mut self) -> Option<&'a [T]> { if self.size > self.v.len() { None } else { let ret = Some(&self.v[..self.size]); self.v = &self.v[1..]; ret } } #[inline] fn size_hint(&self) -> (usize, Option) { if self.size > self.v.len() { (0, Some(0)) } else { let size = self.v.len() - self.size + 1; (size, Some(size)) } } #[inline] fn count(self) -> usize { self.len() } #[inline] fn nth(&mut self, n: usize) -> Option { let (end, overflow) = self.size.overflowing_add(n); if end > self.v.len() || overflow { self.v = &[]; None } else { let nth = &self.v[n..end]; self.v = &self.v[n + 1..]; Some(nth) } } #[inline] fn last(self) -> Option { if self.size > self.v.len() { None } else { let start = self.v.len() - self.size; Some(&self.v[start..]) } } #[doc(hidden)] unsafe fn get_unchecked(&mut self, idx: usize) -> Self::Item { // SAFETY: since the caller guarantees that `i` is in bounds, // which means that `i` cannot overflow an `isize`, and the // slice created by `from_raw_parts` is a subslice of `self.v` // thus is guaranteed to be valid for the lifetime `'a` of `self.v`. unsafe { from_raw_parts(self.v.as_ptr().add(idx), self.size) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> DoubleEndedIterator for Windows<'a, T> { #[inline] fn next_back(&mut self) -> Option<&'a [T]> { if self.size > self.v.len() { None } else { let ret = Some(&self.v[self.v.len() - self.size..]); self.v = &self.v[..self.v.len() - 1]; ret } } #[inline] fn nth_back(&mut self, n: usize) -> Option { let (end, overflow) = self.v.len().overflowing_sub(n); if end < self.size || overflow { self.v = &[]; None } else { let ret = &self.v[end - self.size..end]; self.v = &self.v[..end - 1]; Some(ret) } } } #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for Windows<'_, T> {} #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl TrustedLen for Windows<'_, T> {} #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for Windows<'_, T> {} #[doc(hidden)] #[unstable(feature = "trusted_random_access", issue = "none")] unsafe impl<'a, T> TrustedRandomAccess for Windows<'a, T> { fn may_have_side_effect() -> bool { false } } /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a /// time), starting at the beginning of the slice. /// /// When the slice len is not evenly divided by the chunk size, the last slice /// of the iteration will be the remainder. /// /// This struct is created by the [`chunks`] method on [slices]. /// /// [`chunks`]: ../../std/primitive.slice.html#method.chunks /// [slices]: ../../std/primitive.slice.html #[derive(Debug)] #[stable(feature = "rust1", since = "1.0.0")] pub struct Chunks<'a, T: 'a> { v: &'a [T], chunk_size: usize, } // FIXME(#26925) Remove in favor of `#[derive(Clone)]` #[stable(feature = "rust1", since = "1.0.0")] impl Clone for Chunks<'_, T> { fn clone(&self) -> Self { Chunks { v: self.v, chunk_size: self.chunk_size } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Iterator for Chunks<'a, T> { type Item = &'a [T]; #[inline] fn next(&mut self) -> Option<&'a [T]> { if self.v.is_empty() { None } else { let chunksz = cmp::min(self.v.len(), self.chunk_size); let (fst, snd) = self.v.split_at(chunksz); self.v = snd; Some(fst) } } #[inline] fn size_hint(&self) -> (usize, Option) { if self.v.is_empty() { (0, Some(0)) } else { let n = self.v.len() / self.chunk_size; let rem = self.v.len() % self.chunk_size; let n = if rem > 0 { n + 1 } else { n }; (n, Some(n)) } } #[inline] fn count(self) -> usize { self.len() } #[inline] fn nth(&mut self, n: usize) -> Option { let (start, overflow) = n.overflowing_mul(self.chunk_size); if start >= self.v.len() || overflow { self.v = &[]; None } else { let end = match start.checked_add(self.chunk_size) { Some(sum) => cmp::min(self.v.len(), sum), None => self.v.len(), }; let nth = &self.v[start..end]; self.v = &self.v[end..]; Some(nth) } } #[inline] fn last(self) -> Option { if self.v.is_empty() { None } else { let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size; Some(&self.v[start..]) } } #[doc(hidden)] unsafe fn get_unchecked(&mut self, idx: usize) -> Self::Item { let start = idx * self.chunk_size; let end = match start.checked_add(self.chunk_size) { None => self.v.len(), Some(end) => cmp::min(end, self.v.len()), }; // SAFETY: the caller guarantees that `i` is in bounds, // which means that `start` must be in bounds of the // underlying `self.v` slice, and we made sure that `end` // is also in bounds of `self.v`. Thus, `start` cannot overflow // an `isize`, and the slice constructed by `from_raw_parts` // is a subslice of `self.v` which is guaranteed to be valid // for the lifetime `'a` of `self.v`. unsafe { from_raw_parts(self.v.as_ptr().add(start), end - start) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> DoubleEndedIterator for Chunks<'a, T> { #[inline] fn next_back(&mut self) -> Option<&'a [T]> { if self.v.is_empty() { None } else { let remainder = self.v.len() % self.chunk_size; let chunksz = if remainder != 0 { remainder } else { self.chunk_size }; let (fst, snd) = self.v.split_at(self.v.len() - chunksz); self.v = fst; Some(snd) } } #[inline] fn nth_back(&mut self, n: usize) -> Option { let len = self.len(); if n >= len { self.v = &[]; None } else { let start = (len - 1 - n) * self.chunk_size; let end = match start.checked_add(self.chunk_size) { Some(res) => cmp::min(res, self.v.len()), None => self.v.len(), }; let nth_back = &self.v[start..end]; self.v = &self.v[..start]; Some(nth_back) } } } #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for Chunks<'_, T> {} #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl TrustedLen for Chunks<'_, T> {} #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for Chunks<'_, T> {} #[doc(hidden)] #[unstable(feature = "trusted_random_access", issue = "none")] unsafe impl<'a, T> TrustedRandomAccess for Chunks<'a, T> { fn may_have_side_effect() -> bool { false } } /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size` /// elements at a time), starting at the beginning of the slice. /// /// When the slice len is not evenly divided by the chunk size, the last slice /// of the iteration will be the remainder. /// /// This struct is created by the [`chunks_mut`] method on [slices]. /// /// [`chunks_mut`]: ../../std/primitive.slice.html#method.chunks_mut /// [slices]: ../../std/primitive.slice.html #[derive(Debug)] #[stable(feature = "rust1", since = "1.0.0")] pub struct ChunksMut<'a, T: 'a> { v: &'a mut [T], chunk_size: usize, } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Iterator for ChunksMut<'a, T> { type Item = &'a mut [T]; #[inline] fn next(&mut self) -> Option<&'a mut [T]> { if self.v.is_empty() { None } else { let sz = cmp::min(self.v.len(), self.chunk_size); let tmp = mem::replace(&mut self.v, &mut []); let (head, tail) = tmp.split_at_mut(sz); self.v = tail; Some(head) } } #[inline] fn size_hint(&self) -> (usize, Option) { if self.v.is_empty() { (0, Some(0)) } else { let n = self.v.len() / self.chunk_size; let rem = self.v.len() % self.chunk_size; let n = if rem > 0 { n + 1 } else { n }; (n, Some(n)) } } #[inline] fn count(self) -> usize { self.len() } #[inline] fn nth(&mut self, n: usize) -> Option<&'a mut [T]> { let (start, overflow) = n.overflowing_mul(self.chunk_size); if start >= self.v.len() || overflow { self.v = &mut []; None } else { let end = match start.checked_add(self.chunk_size) { Some(sum) => cmp::min(self.v.len(), sum), None => self.v.len(), }; let tmp = mem::replace(&mut self.v, &mut []); let (head, tail) = tmp.split_at_mut(end); let (_, nth) = head.split_at_mut(start); self.v = tail; Some(nth) } } #[inline] fn last(self) -> Option { if self.v.is_empty() { None } else { let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size; Some(&mut self.v[start..]) } } #[doc(hidden)] unsafe fn get_unchecked(&mut self, idx: usize) -> Self::Item { let start = idx * self.chunk_size; let end = match start.checked_add(self.chunk_size) { None => self.v.len(), Some(end) => cmp::min(end, self.v.len()), }; // SAFETY: see comments for `Chunks::get_unchecked`. // // Also note that the caller also guarantees that we're never called // with the same index again, and that no other methods that will // access this subslice are called, so it is valid for the returned // slice to be mutable. unsafe { from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> DoubleEndedIterator for ChunksMut<'a, T> { #[inline] fn next_back(&mut self) -> Option<&'a mut [T]> { if self.v.is_empty() { None } else { let remainder = self.v.len() % self.chunk_size; let sz = if remainder != 0 { remainder } else { self.chunk_size }; let tmp = mem::replace(&mut self.v, &mut []); let tmp_len = tmp.len(); let (head, tail) = tmp.split_at_mut(tmp_len - sz); self.v = head; Some(tail) } } #[inline] fn nth_back(&mut self, n: usize) -> Option { let len = self.len(); if n >= len { self.v = &mut []; None } else { let start = (len - 1 - n) * self.chunk_size; let end = match start.checked_add(self.chunk_size) { Some(res) => cmp::min(res, self.v.len()), None => self.v.len(), }; let (temp, _tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end); let (head, nth_back) = temp.split_at_mut(start); self.v = head; Some(nth_back) } } } #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for ChunksMut<'_, T> {} #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl TrustedLen for ChunksMut<'_, T> {} #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for ChunksMut<'_, T> {} #[doc(hidden)] #[unstable(feature = "trusted_random_access", issue = "none")] unsafe impl<'a, T> TrustedRandomAccess for ChunksMut<'a, T> { fn may_have_side_effect() -> bool { false } } /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a /// time), starting at the beginning of the slice. /// /// When the slice len is not evenly divided by the chunk size, the last /// up to `chunk_size-1` elements will be omitted but can be retrieved from /// the [`remainder`] function from the iterator. /// /// This struct is created by the [`chunks_exact`] method on [slices]. /// /// [`chunks_exact`]: ../../std/primitive.slice.html#method.chunks_exact /// [`remainder`]: ChunksExact::remainder /// [slices]: ../../std/primitive.slice.html #[derive(Debug)] #[stable(feature = "chunks_exact", since = "1.31.0")] pub struct ChunksExact<'a, T: 'a> { v: &'a [T], rem: &'a [T], chunk_size: usize, } impl<'a, T> ChunksExact<'a, T> { /// Returns the remainder of the original slice that is not going to be /// returned by the iterator. The returned slice has at most `chunk_size-1` /// elements. #[stable(feature = "chunks_exact", since = "1.31.0")] pub fn remainder(&self) -> &'a [T] { self.rem } } // FIXME(#26925) Remove in favor of `#[derive(Clone)]` #[stable(feature = "chunks_exact", since = "1.31.0")] impl Clone for ChunksExact<'_, T> { fn clone(&self) -> Self { ChunksExact { v: self.v, rem: self.rem, chunk_size: self.chunk_size } } } #[stable(feature = "chunks_exact", since = "1.31.0")] impl<'a, T> Iterator for ChunksExact<'a, T> { type Item = &'a [T]; #[inline] fn next(&mut self) -> Option<&'a [T]> { if self.v.len() < self.chunk_size { None } else { let (fst, snd) = self.v.split_at(self.chunk_size); self.v = snd; Some(fst) } } #[inline] fn size_hint(&self) -> (usize, Option) { let n = self.v.len() / self.chunk_size; (n, Some(n)) } #[inline] fn count(self) -> usize { self.len() } #[inline] fn nth(&mut self, n: usize) -> Option { let (start, overflow) = n.overflowing_mul(self.chunk_size); if start >= self.v.len() || overflow { self.v = &[]; None } else { let (_, snd) = self.v.split_at(start); self.v = snd; self.next() } } #[inline] fn last(mut self) -> Option { self.next_back() } #[doc(hidden)] unsafe fn get_unchecked(&mut self, idx: usize) -> Self::Item { let start = idx * self.chunk_size; // SAFETY: mostly identical to `Chunks::get_unchecked`. unsafe { from_raw_parts(self.v.as_ptr().add(start), self.chunk_size) } } } #[stable(feature = "chunks_exact", since = "1.31.0")] impl<'a, T> DoubleEndedIterator for ChunksExact<'a, T> { #[inline] fn next_back(&mut self) -> Option<&'a [T]> { if self.v.len() < self.chunk_size { None } else { let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size); self.v = fst; Some(snd) } } #[inline] fn nth_back(&mut self, n: usize) -> Option { let len = self.len(); if n >= len { self.v = &[]; None } else { let start = (len - 1 - n) * self.chunk_size; let end = start + self.chunk_size; let nth_back = &self.v[start..end]; self.v = &self.v[..start]; Some(nth_back) } } } #[stable(feature = "chunks_exact", since = "1.31.0")] impl ExactSizeIterator for ChunksExact<'_, T> { fn is_empty(&self) -> bool { self.v.is_empty() } } #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl TrustedLen for ChunksExact<'_, T> {} #[stable(feature = "chunks_exact", since = "1.31.0")] impl FusedIterator for ChunksExact<'_, T> {} #[doc(hidden)] #[unstable(feature = "trusted_random_access", issue = "none")] unsafe impl<'a, T> TrustedRandomAccess for ChunksExact<'a, T> { fn may_have_side_effect() -> bool { false } } /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size` /// elements at a time), starting at the beginning of the slice. /// /// When the slice len is not evenly divided by the chunk size, the last up to /// `chunk_size-1` elements will be omitted but can be retrieved from the /// [`into_remainder`] function from the iterator. /// /// This struct is created by the [`chunks_exact_mut`] method on [slices]. /// /// [`chunks_exact_mut`]: ../../std/primitive.slice.html#method.chunks_exact_mut /// [`into_remainder`]: ChunksExactMut::into_remainder /// [slices]: ../../std/primitive.slice.html #[derive(Debug)] #[stable(feature = "chunks_exact", since = "1.31.0")] pub struct ChunksExactMut<'a, T: 'a> { v: &'a mut [T], rem: &'a mut [T], chunk_size: usize, } impl<'a, T> ChunksExactMut<'a, T> { /// Returns the remainder of the original slice that is not going to be /// returned by the iterator. The returned slice has at most `chunk_size-1` /// elements. #[stable(feature = "chunks_exact", since = "1.31.0")] pub fn into_remainder(self) -> &'a mut [T] { self.rem } } #[stable(feature = "chunks_exact", since = "1.31.0")] impl<'a, T> Iterator for ChunksExactMut<'a, T> { type Item = &'a mut [T]; #[inline] fn next(&mut self) -> Option<&'a mut [T]> { if self.v.len() < self.chunk_size { None } else { let tmp = mem::replace(&mut self.v, &mut []); let (head, tail) = tmp.split_at_mut(self.chunk_size); self.v = tail; Some(head) } } #[inline] fn size_hint(&self) -> (usize, Option) { let n = self.v.len() / self.chunk_size; (n, Some(n)) } #[inline] fn count(self) -> usize { self.len() } #[inline] fn nth(&mut self, n: usize) -> Option<&'a mut [T]> { let (start, overflow) = n.overflowing_mul(self.chunk_size); if start >= self.v.len() || overflow { self.v = &mut []; None } else { let tmp = mem::replace(&mut self.v, &mut []); let (_, snd) = tmp.split_at_mut(start); self.v = snd; self.next() } } #[inline] fn last(mut self) -> Option { self.next_back() } #[doc(hidden)] unsafe fn get_unchecked(&mut self, idx: usize) -> Self::Item { let start = idx * self.chunk_size; // SAFETY: see comments for `ChunksMut::get_unchecked`. unsafe { from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size) } } } #[stable(feature = "chunks_exact", since = "1.31.0")] impl<'a, T> DoubleEndedIterator for ChunksExactMut<'a, T> { #[inline] fn next_back(&mut self) -> Option<&'a mut [T]> { if self.v.len() < self.chunk_size { None } else { let tmp = mem::replace(&mut self.v, &mut []); let tmp_len = tmp.len(); let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size); self.v = head; Some(tail) } } #[inline] fn nth_back(&mut self, n: usize) -> Option { let len = self.len(); if n >= len { self.v = &mut []; None } else { let start = (len - 1 - n) * self.chunk_size; let end = start + self.chunk_size; let (temp, _tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end); let (head, nth_back) = temp.split_at_mut(start); self.v = head; Some(nth_back) } } } #[stable(feature = "chunks_exact", since = "1.31.0")] impl ExactSizeIterator for ChunksExactMut<'_, T> { fn is_empty(&self) -> bool { self.v.is_empty() } } #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl TrustedLen for ChunksExactMut<'_, T> {} #[stable(feature = "chunks_exact", since = "1.31.0")] impl FusedIterator for ChunksExactMut<'_, T> {} #[doc(hidden)] #[unstable(feature = "trusted_random_access", issue = "none")] unsafe impl<'a, T> TrustedRandomAccess for ChunksExactMut<'a, T> { fn may_have_side_effect() -> bool { false } } /// An iterator over a slice in (non-overlapping) chunks (`N` elements at a /// time), starting at the beginning of the slice. /// /// When the slice len is not evenly divided by the chunk size, the last /// up to `N-1` elements will be omitted but can be retrieved from /// the [`remainder`] function from the iterator. /// /// This struct is created by the [`array_chunks`] method on [slices]. /// /// [`array_chunks`]: ../../std/primitive.slice.html#method.array_chunks /// [`remainder`]: ArrayChunks::remainder /// [slices]: ../../std/primitive.slice.html #[derive(Debug)] #[unstable(feature = "array_chunks", issue = "74985")] pub struct ArrayChunks<'a, T: 'a, const N: usize> { iter: Iter<'a, [T; N]>, rem: &'a [T], } impl<'a, T, const N: usize> ArrayChunks<'a, T, N> { /// Returns the remainder of the original slice that is not going to be /// returned by the iterator. The returned slice has at most `N-1` /// elements. #[unstable(feature = "array_chunks", issue = "74985")] pub fn remainder(&self) -> &'a [T] { self.rem } } // FIXME(#26925) Remove in favor of `#[derive(Clone)]` #[unstable(feature = "array_chunks", issue = "74985")] impl Clone for ArrayChunks<'_, T, N> { fn clone(&self) -> Self { ArrayChunks { iter: self.iter.clone(), rem: self.rem } } } #[unstable(feature = "array_chunks", issue = "74985")] impl<'a, T, const N: usize> Iterator for ArrayChunks<'a, T, N> { type Item = &'a [T; N]; #[inline] fn next(&mut self) -> Option<&'a [T; N]> { self.iter.next() } #[inline] fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } #[inline] fn count(self) -> usize { self.iter.count() } #[inline] fn nth(&mut self, n: usize) -> Option { self.iter.nth(n) } #[inline] fn last(self) -> Option { self.iter.last() } unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T; N] { // SAFETY: The safety guarantees of `get_unchecked` are transferred to // the caller. unsafe { self.iter.get_unchecked(i) } } } #[unstable(feature = "array_chunks", issue = "74985")] impl<'a, T, const N: usize> DoubleEndedIterator for ArrayChunks<'a, T, N> { #[inline] fn next_back(&mut self) -> Option<&'a [T; N]> { self.iter.next_back() } #[inline] fn nth_back(&mut self, n: usize) -> Option { self.iter.nth_back(n) } } #[unstable(feature = "array_chunks", issue = "74985")] impl ExactSizeIterator for ArrayChunks<'_, T, N> { fn is_empty(&self) -> bool { self.iter.is_empty() } } #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl TrustedLen for ArrayChunks<'_, T, N> {} #[unstable(feature = "array_chunks", issue = "74985")] impl FusedIterator for ArrayChunks<'_, T, N> {} #[doc(hidden)] #[unstable(feature = "array_chunks", issue = "74985")] unsafe impl<'a, T, const N: usize> TrustedRandomAccess for ArrayChunks<'a, T, N> { fn may_have_side_effect() -> bool { false } } /// An iterator over a slice in (non-overlapping) mutable chunks (`N` elements /// at a time), starting at the beginning of the slice. /// /// When the slice len is not evenly divided by the chunk size, the last /// up to `N-1` elements will be omitted but can be retrieved from /// the [`into_remainder`] function from the iterator. /// /// This struct is created by the [`array_chunks_mut`] method on [slices]. /// /// [`array_chunks_mut`]: ../../std/primitive.slice.html#method.array_chunks_mut /// [`into_remainder`]: ../../std/slice/struct.ArrayChunksMut.html#method.into_remainder /// [slices]: ../../std/primitive.slice.html #[derive(Debug)] #[unstable(feature = "array_chunks", issue = "74985")] pub struct ArrayChunksMut<'a, T: 'a, const N: usize> { iter: IterMut<'a, [T; N]>, rem: &'a mut [T], } impl<'a, T, const N: usize> ArrayChunksMut<'a, T, N> { /// Returns the remainder of the original slice that is not going to be /// returned by the iterator. The returned slice has at most `N-1` /// elements. #[unstable(feature = "array_chunks", issue = "74985")] pub fn into_remainder(self) -> &'a mut [T] { self.rem } } #[unstable(feature = "array_chunks", issue = "74985")] impl<'a, T, const N: usize> Iterator for ArrayChunksMut<'a, T, N> { type Item = &'a mut [T; N]; #[inline] fn next(&mut self) -> Option<&'a mut [T; N]> { self.iter.next() } #[inline] fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } #[inline] fn count(self) -> usize { self.iter.count() } #[inline] fn nth(&mut self, n: usize) -> Option { self.iter.nth(n) } #[inline] fn last(self) -> Option { self.iter.last() } } #[unstable(feature = "array_chunks", issue = "74985")] impl<'a, T, const N: usize> DoubleEndedIterator for ArrayChunksMut<'a, T, N> { #[inline] fn next_back(&mut self) -> Option<&'a mut [T; N]> { self.iter.next_back() } #[inline] fn nth_back(&mut self, n: usize) -> Option { self.iter.nth_back(n) } } #[unstable(feature = "array_chunks", issue = "74985")] impl ExactSizeIterator for ArrayChunksMut<'_, T, N> { fn is_empty(&self) -> bool { self.iter.is_empty() } } #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl TrustedLen for ArrayChunksMut<'_, T, N> {} #[unstable(feature = "array_chunks", issue = "74985")] impl FusedIterator for ArrayChunksMut<'_, T, N> {} #[doc(hidden)] #[unstable(feature = "array_chunks", issue = "74985")] unsafe impl<'a, T, const N: usize> TrustedRandomAccess for ArrayChunksMut<'a, T, N> { unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T; N] { unsafe { self.iter.get_unchecked(i) } } fn may_have_side_effect() -> bool { false } } /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a /// time), starting at the end of the slice. /// /// When the slice len is not evenly divided by the chunk size, the last slice /// of the iteration will be the remainder. /// /// This struct is created by the [`rchunks`] method on [slices]. /// /// [`rchunks`]: ../../std/primitive.slice.html#method.rchunks /// [slices]: ../../std/primitive.slice.html #[derive(Debug)] #[stable(feature = "rchunks", since = "1.31.0")] pub struct RChunks<'a, T: 'a> { v: &'a [T], chunk_size: usize, } // FIXME(#26925) Remove in favor of `#[derive(Clone)]` #[stable(feature = "rchunks", since = "1.31.0")] impl Clone for RChunks<'_, T> { fn clone(&self) -> Self { RChunks { v: self.v, chunk_size: self.chunk_size } } } #[stable(feature = "rchunks", since = "1.31.0")] impl<'a, T> Iterator for RChunks<'a, T> { type Item = &'a [T]; #[inline] fn next(&mut self) -> Option<&'a [T]> { if self.v.is_empty() { None } else { let chunksz = cmp::min(self.v.len(), self.chunk_size); let (fst, snd) = self.v.split_at(self.v.len() - chunksz); self.v = fst; Some(snd) } } #[inline] fn size_hint(&self) -> (usize, Option) { if self.v.is_empty() { (0, Some(0)) } else { let n = self.v.len() / self.chunk_size; let rem = self.v.len() % self.chunk_size; let n = if rem > 0 { n + 1 } else { n }; (n, Some(n)) } } #[inline] fn count(self) -> usize { self.len() } #[inline] fn nth(&mut self, n: usize) -> Option { let (end, overflow) = n.overflowing_mul(self.chunk_size); if end >= self.v.len() || overflow { self.v = &[]; None } else { // Can't underflow because of the check above let end = self.v.len() - end; let start = match end.checked_sub(self.chunk_size) { Some(sum) => sum, None => 0, }; let nth = &self.v[start..end]; self.v = &self.v[0..start]; Some(nth) } } #[inline] fn last(self) -> Option { if self.v.is_empty() { None } else { let rem = self.v.len() % self.chunk_size; let end = if rem == 0 { self.chunk_size } else { rem }; Some(&self.v[0..end]) } } #[doc(hidden)] unsafe fn get_unchecked(&mut self, idx: usize) -> Self::Item { let end = self.v.len() - idx * self.chunk_size; let start = match end.checked_sub(self.chunk_size) { None => 0, Some(start) => start, }; // SAFETY: mostly identical to `Chunks::get_unchecked`. unsafe { from_raw_parts(self.v.as_ptr().add(start), end - start) } } } #[stable(feature = "rchunks", since = "1.31.0")] impl<'a, T> DoubleEndedIterator for RChunks<'a, T> { #[inline] fn next_back(&mut self) -> Option<&'a [T]> { if self.v.is_empty() { None } else { let remainder = self.v.len() % self.chunk_size; let chunksz = if remainder != 0 { remainder } else { self.chunk_size }; let (fst, snd) = self.v.split_at(chunksz); self.v = snd; Some(fst) } } #[inline] fn nth_back(&mut self, n: usize) -> Option { let len = self.len(); if n >= len { self.v = &[]; None } else { // can't underflow because `n < len` let offset_from_end = (len - 1 - n) * self.chunk_size; let end = self.v.len() - offset_from_end; let start = end.saturating_sub(self.chunk_size); let nth_back = &self.v[start..end]; self.v = &self.v[end..]; Some(nth_back) } } } #[stable(feature = "rchunks", since = "1.31.0")] impl ExactSizeIterator for RChunks<'_, T> {} #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl TrustedLen for RChunks<'_, T> {} #[stable(feature = "rchunks", since = "1.31.0")] impl FusedIterator for RChunks<'_, T> {} #[doc(hidden)] #[unstable(feature = "trusted_random_access", issue = "none")] unsafe impl<'a, T> TrustedRandomAccess for RChunks<'a, T> { fn may_have_side_effect() -> bool { false } } /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size` /// elements at a time), starting at the end of the slice. /// /// When the slice len is not evenly divided by the chunk size, the last slice /// of the iteration will be the remainder. /// /// This struct is created by the [`rchunks_mut`] method on [slices]. /// /// [`rchunks_mut`]: ../../std/primitive.slice.html#method.rchunks_mut /// [slices]: ../../std/primitive.slice.html #[derive(Debug)] #[stable(feature = "rchunks", since = "1.31.0")] pub struct RChunksMut<'a, T: 'a> { v: &'a mut [T], chunk_size: usize, } #[stable(feature = "rchunks", since = "1.31.0")] impl<'a, T> Iterator for RChunksMut<'a, T> { type Item = &'a mut [T]; #[inline] fn next(&mut self) -> Option<&'a mut [T]> { if self.v.is_empty() { None } else { let sz = cmp::min(self.v.len(), self.chunk_size); let tmp = mem::replace(&mut self.v, &mut []); let tmp_len = tmp.len(); let (head, tail) = tmp.split_at_mut(tmp_len - sz); self.v = head; Some(tail) } } #[inline] fn size_hint(&self) -> (usize, Option) { if self.v.is_empty() { (0, Some(0)) } else { let n = self.v.len() / self.chunk_size; let rem = self.v.len() % self.chunk_size; let n = if rem > 0 { n + 1 } else { n }; (n, Some(n)) } } #[inline] fn count(self) -> usize { self.len() } #[inline] fn nth(&mut self, n: usize) -> Option<&'a mut [T]> { let (end, overflow) = n.overflowing_mul(self.chunk_size); if end >= self.v.len() || overflow { self.v = &mut []; None } else { // Can't underflow because of the check above let end = self.v.len() - end; let start = match end.checked_sub(self.chunk_size) { Some(sum) => sum, None => 0, }; let tmp = mem::replace(&mut self.v, &mut []); let (head, tail) = tmp.split_at_mut(start); let (nth, _) = tail.split_at_mut(end - start); self.v = head; Some(nth) } } #[inline] fn last(self) -> Option { if self.v.is_empty() { None } else { let rem = self.v.len() % self.chunk_size; let end = if rem == 0 { self.chunk_size } else { rem }; Some(&mut self.v[0..end]) } } #[doc(hidden)] unsafe fn get_unchecked(&mut self, idx: usize) -> Self::Item { let end = self.v.len() - idx * self.chunk_size; let start = match end.checked_sub(self.chunk_size) { None => 0, Some(start) => start, }; // SAFETY: see comments for `RChunks::get_unchecked` and `ChunksMut::get_unchecked` unsafe { from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start) } } } #[stable(feature = "rchunks", since = "1.31.0")] impl<'a, T> DoubleEndedIterator for RChunksMut<'a, T> { #[inline] fn next_back(&mut self) -> Option<&'a mut [T]> { if self.v.is_empty() { None } else { let remainder = self.v.len() % self.chunk_size; let sz = if remainder != 0 { remainder } else { self.chunk_size }; let tmp = mem::replace(&mut self.v, &mut []); let (head, tail) = tmp.split_at_mut(sz); self.v = tail; Some(head) } } #[inline] fn nth_back(&mut self, n: usize) -> Option { let len = self.len(); if n >= len { self.v = &mut []; None } else { // can't underflow because `n < len` let offset_from_end = (len - 1 - n) * self.chunk_size; let end = self.v.len() - offset_from_end; let start = end.saturating_sub(self.chunk_size); let (tmp, tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end); let (_, nth_back) = tmp.split_at_mut(start); self.v = tail; Some(nth_back) } } } #[stable(feature = "rchunks", since = "1.31.0")] impl ExactSizeIterator for RChunksMut<'_, T> {} #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl TrustedLen for RChunksMut<'_, T> {} #[stable(feature = "rchunks", since = "1.31.0")] impl FusedIterator for RChunksMut<'_, T> {} #[doc(hidden)] #[unstable(feature = "trusted_random_access", issue = "none")] unsafe impl<'a, T> TrustedRandomAccess for RChunksMut<'a, T> { fn may_have_side_effect() -> bool { false } } /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a /// time), starting at the end of the slice. /// /// When the slice len is not evenly divided by the chunk size, the last /// up to `chunk_size-1` elements will be omitted but can be retrieved from /// the [`remainder`] function from the iterator. /// /// This struct is created by the [`rchunks_exact`] method on [slices]. /// /// [`rchunks_exact`]: ../../std/primitive.slice.html#method.rchunks_exact /// [`remainder`]: ChunksExact::remainder /// [slices]: ../../std/primitive.slice.html #[derive(Debug)] #[stable(feature = "rchunks", since = "1.31.0")] pub struct RChunksExact<'a, T: 'a> { v: &'a [T], rem: &'a [T], chunk_size: usize, } impl<'a, T> RChunksExact<'a, T> { /// Returns the remainder of the original slice that is not going to be /// returned by the iterator. The returned slice has at most `chunk_size-1` /// elements. #[stable(feature = "rchunks", since = "1.31.0")] pub fn remainder(&self) -> &'a [T] { self.rem } } // FIXME(#26925) Remove in favor of `#[derive(Clone)]` #[stable(feature = "rchunks", since = "1.31.0")] impl<'a, T> Clone for RChunksExact<'a, T> { fn clone(&self) -> RChunksExact<'a, T> { RChunksExact { v: self.v, rem: self.rem, chunk_size: self.chunk_size } } } #[stable(feature = "rchunks", since = "1.31.0")] impl<'a, T> Iterator for RChunksExact<'a, T> { type Item = &'a [T]; #[inline] fn next(&mut self) -> Option<&'a [T]> { if self.v.len() < self.chunk_size { None } else { let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size); self.v = fst; Some(snd) } } #[inline] fn size_hint(&self) -> (usize, Option) { let n = self.v.len() / self.chunk_size; (n, Some(n)) } #[inline] fn count(self) -> usize { self.len() } #[inline] fn nth(&mut self, n: usize) -> Option { let (end, overflow) = n.overflowing_mul(self.chunk_size); if end >= self.v.len() || overflow { self.v = &[]; None } else { let (fst, _) = self.v.split_at(self.v.len() - end); self.v = fst; self.next() } } #[inline] fn last(mut self) -> Option { self.next_back() } #[doc(hidden)] unsafe fn get_unchecked(&mut self, idx: usize) -> Self::Item { let end = self.v.len() - idx * self.chunk_size; let start = end - self.chunk_size; // SAFETY: // SAFETY: mostmy identical to `Chunks::get_unchecked`. unsafe { from_raw_parts(self.v.as_ptr().add(start), self.chunk_size) } } } #[stable(feature = "rchunks", since = "1.31.0")] impl<'a, T> DoubleEndedIterator for RChunksExact<'a, T> { #[inline] fn next_back(&mut self) -> Option<&'a [T]> { if self.v.len() < self.chunk_size { None } else { let (fst, snd) = self.v.split_at(self.chunk_size); self.v = snd; Some(fst) } } #[inline] fn nth_back(&mut self, n: usize) -> Option { let len = self.len(); if n >= len { self.v = &[]; None } else { // now that we know that `n` corresponds to a chunk, // none of these operations can underflow/overflow let offset = (len - n) * self.chunk_size; let start = self.v.len() - offset; let end = start + self.chunk_size; let nth_back = &self.v[start..end]; self.v = &self.v[end..]; Some(nth_back) } } } #[stable(feature = "rchunks", since = "1.31.0")] impl<'a, T> ExactSizeIterator for RChunksExact<'a, T> { fn is_empty(&self) -> bool { self.v.is_empty() } } #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl TrustedLen for RChunksExact<'_, T> {} #[stable(feature = "rchunks", since = "1.31.0")] impl FusedIterator for RChunksExact<'_, T> {} #[doc(hidden)] #[unstable(feature = "trusted_random_access", issue = "none")] unsafe impl<'a, T> TrustedRandomAccess for RChunksExact<'a, T> { fn may_have_side_effect() -> bool { false } } /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size` /// elements at a time), starting at the end of the slice. /// /// When the slice len is not evenly divided by the chunk size, the last up to /// `chunk_size-1` elements will be omitted but can be retrieved from the /// [`into_remainder`] function from the iterator. /// /// This struct is created by the [`rchunks_exact_mut`] method on [slices]. /// /// [`rchunks_exact_mut`]: ../../std/primitive.slice.html#method.rchunks_exact_mut /// [`into_remainder`]: ChunksExactMut::into_remainder /// [slices]: ../../std/primitive.slice.html #[derive(Debug)] #[stable(feature = "rchunks", since = "1.31.0")] pub struct RChunksExactMut<'a, T: 'a> { v: &'a mut [T], rem: &'a mut [T], chunk_size: usize, } impl<'a, T> RChunksExactMut<'a, T> { /// Returns the remainder of the original slice that is not going to be /// returned by the iterator. The returned slice has at most `chunk_size-1` /// elements. #[stable(feature = "rchunks", since = "1.31.0")] pub fn into_remainder(self) -> &'a mut [T] { self.rem } } #[stable(feature = "rchunks", since = "1.31.0")] impl<'a, T> Iterator for RChunksExactMut<'a, T> { type Item = &'a mut [T]; #[inline] fn next(&mut self) -> Option<&'a mut [T]> { if self.v.len() < self.chunk_size { None } else { let tmp = mem::replace(&mut self.v, &mut []); let tmp_len = tmp.len(); let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size); self.v = head; Some(tail) } } #[inline] fn size_hint(&self) -> (usize, Option) { let n = self.v.len() / self.chunk_size; (n, Some(n)) } #[inline] fn count(self) -> usize { self.len() } #[inline] fn nth(&mut self, n: usize) -> Option<&'a mut [T]> { let (end, overflow) = n.overflowing_mul(self.chunk_size); if end >= self.v.len() || overflow { self.v = &mut []; None } else { let tmp = mem::replace(&mut self.v, &mut []); let tmp_len = tmp.len(); let (fst, _) = tmp.split_at_mut(tmp_len - end); self.v = fst; self.next() } } #[inline] fn last(mut self) -> Option { self.next_back() } #[doc(hidden)] unsafe fn get_unchecked(&mut self, idx: usize) -> Self::Item { let end = self.v.len() - idx * self.chunk_size; let start = end - self.chunk_size; // SAFETY: see comments for `RChunksMut::get_unchecked`. unsafe { from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size) } } } #[stable(feature = "rchunks", since = "1.31.0")] impl<'a, T> DoubleEndedIterator for RChunksExactMut<'a, T> { #[inline] fn next_back(&mut self) -> Option<&'a mut [T]> { if self.v.len() < self.chunk_size { None } else { let tmp = mem::replace(&mut self.v, &mut []); let (head, tail) = tmp.split_at_mut(self.chunk_size); self.v = tail; Some(head) } } #[inline] fn nth_back(&mut self, n: usize) -> Option { let len = self.len(); if n >= len { self.v = &mut []; None } else { // now that we know that `n` corresponds to a chunk, // none of these operations can underflow/overflow let offset = (len - n) * self.chunk_size; let start = self.v.len() - offset; let end = start + self.chunk_size; let (tmp, tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end); let (_, nth_back) = tmp.split_at_mut(start); self.v = tail; Some(nth_back) } } } #[stable(feature = "rchunks", since = "1.31.0")] impl ExactSizeIterator for RChunksExactMut<'_, T> { fn is_empty(&self) -> bool { self.v.is_empty() } } #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl TrustedLen for RChunksExactMut<'_, T> {} #[stable(feature = "rchunks", since = "1.31.0")] impl FusedIterator for RChunksExactMut<'_, T> {} #[doc(hidden)] #[unstable(feature = "trusted_random_access", issue = "none")] unsafe impl<'a, T> TrustedRandomAccess for RChunksExactMut<'a, T> { fn may_have_side_effect() -> bool { false } } // // Free functions // /// Forms a slice from a pointer and a length. /// /// The `len` argument is the number of **elements**, not the number of bytes. /// /// # Safety /// /// Behavior is undefined if any of the following conditions are violated: /// /// * `data` must be [valid] for reads for `len * mem::size_of::()` many bytes, /// and it must be properly aligned. This means in particular: /// /// * The entire memory range of this slice must be contained within a single allocated object! /// Slices can never span across multiple allocated objects. See [below](#incorrect-usage) /// for an example incorrectly not taking this into account. /// * `data` must be non-null and aligned even for zero-length slices. One /// reason for this is that enum layout optimizations may rely on references /// (including slices of any length) being aligned and non-null to distinguish /// them from other data. You can obtain a pointer that is usable as `data` /// for zero-length slices using [`NonNull::dangling()`]. /// /// * The memory referenced by the returned slice must not be mutated for the duration /// of lifetime `'a`, except inside an `UnsafeCell`. /// /// * The total size `len * mem::size_of::()` of the slice must be no larger than `isize::MAX`. /// See the safety documentation of [`pointer::offset`]. /// /// # Caveat /// /// The lifetime for the returned slice is inferred from its usage. To /// prevent accidental misuse, it's suggested to tie the lifetime to whichever /// source lifetime is safe in the context, such as by providing a helper /// function taking the lifetime of a host value for the slice, or by explicit /// annotation. /// /// # Examples /// /// ``` /// use std::slice; /// /// // manifest a slice for a single element /// let x = 42; /// let ptr = &x as *const _; /// let slice = unsafe { slice::from_raw_parts(ptr, 1) }; /// assert_eq!(slice[0], 42); /// ``` /// /// ### Incorrect usage /// /// The following `join_slices` function is **unsound** ⚠️ /// /// ```rust,no_run /// use std::slice; /// /// fn join_slices<'a, T>(fst: &'a [T], snd: &'a [T]) -> &'a [T] { /// let fst_end = fst.as_ptr().wrapping_add(fst.len()); /// let snd_start = snd.as_ptr(); /// assert_eq!(fst_end, snd_start, "Slices must be contiguous!"); /// unsafe { /// // The assertion above ensures `fst` and `snd` are contiguous, but they might /// // still be contained within _different allocated objects_, in which case /// // creating this slice is undefined behavior. /// slice::from_raw_parts(fst.as_ptr(), fst.len() + snd.len()) /// } /// } /// /// fn main() { /// // `a` and `b` are different allocated objects... /// let a = 42; /// let b = 27; /// // ... which may nevertheless be laid out contiguously in memory: | a | b | /// let _ = join_slices(slice::from_ref(&a), slice::from_ref(&b)); // UB /// } /// ``` /// /// [valid]: ptr#safety /// [`NonNull::dangling()`]: ptr::NonNull::dangling /// [`pointer::offset`]: ../../std/primitive.pointer.html#method.offset #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub unsafe fn from_raw_parts<'a, T>(data: *const T, len: usize) -> &'a [T] { debug_assert!(is_aligned_and_not_null(data), "attempt to create unaligned or null slice"); debug_assert!( mem::size_of::().saturating_mul(len) <= isize::MAX as usize, "attempt to create slice covering at least half the address space" ); // SAFETY: the caller must uphold the safety contract for `from_raw_parts`. unsafe { &*ptr::slice_from_raw_parts(data, len) } } /// Performs the same functionality as [`from_raw_parts`], except that a /// mutable slice is returned. /// /// # Safety /// /// Behavior is undefined if any of the following conditions are violated: /// /// * `data` must be [valid] for boths reads and writes for `len * mem::size_of::()` many bytes, /// and it must be properly aligned. This means in particular: /// /// * The entire memory range of this slice must be contained within a single allocated object! /// Slices can never span across multiple allocated objects. /// * `data` must be non-null and aligned even for zero-length slices. One /// reason for this is that enum layout optimizations may rely on references /// (including slices of any length) being aligned and non-null to distinguish /// them from other data. You can obtain a pointer that is usable as `data` /// for zero-length slices using [`NonNull::dangling()`]. /// /// * The memory referenced by the returned slice must not be accessed through any other pointer /// (not derived from the return value) for the duration of lifetime `'a`. /// Both read and write accesses are forbidden. /// /// * The total size `len * mem::size_of::()` of the slice must be no larger than `isize::MAX`. /// See the safety documentation of [`pointer::offset`]. /// /// [valid]: ptr#safety /// [`NonNull::dangling()`]: ptr::NonNull::dangling /// [`pointer::offset`]: ../../std/primitive.pointer.html#method.offset #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub unsafe fn from_raw_parts_mut<'a, T>(data: *mut T, len: usize) -> &'a mut [T] { debug_assert!(is_aligned_and_not_null(data), "attempt to create unaligned or null slice"); debug_assert!( mem::size_of::().saturating_mul(len) <= isize::MAX as usize, "attempt to create slice covering at least half the address space" ); // SAFETY: the caller must uphold the safety contract for `from_raw_parts_mut`. unsafe { &mut *ptr::slice_from_raw_parts_mut(data, len) } } /// Converts a reference to T into a slice of length 1 (without copying). #[stable(feature = "from_ref", since = "1.28.0")] pub fn from_ref(s: &T) -> &[T] { // SAFETY: a reference is guaranteed to be valid for reads. The returned // reference cannot be mutated as it is an immutable reference. // `mem::size_of::()` cannot be larger than `isize::MAX`. // Thus the call to `from_raw_parts` is safe. unsafe { from_raw_parts(s, 1) } } /// Converts a reference to T into a slice of length 1 (without copying). #[stable(feature = "from_ref", since = "1.28.0")] pub fn from_mut(s: &mut T) -> &mut [T] { // SAFETY: a mutable reference is guaranteed to be valid for writes. // The reference cannot be accessed by another pointer as it is an mutable reference. // `mem::size_of::()` cannot be larger than `isize::MAX`. // Thus the call to `from_raw_parts_mut` is safe. unsafe { from_raw_parts_mut(s, 1) } } // This function is public only because there is no other way to unit test heapsort. #[unstable(feature = "sort_internals", reason = "internal to sort module", issue = "none")] #[doc(hidden)] pub fn heapsort(v: &mut [T], mut is_less: F) where F: FnMut(&T, &T) -> bool, { sort::heapsort(v, &mut is_less); } // // Comparison traits // extern "C" { /// Calls implementation provided memcmp. /// /// Interprets the data as u8. /// /// Returns 0 for equal, < 0 for less than and > 0 for greater /// than. // FIXME(#32610): Return type should be c_int fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32; } #[stable(feature = "rust1", since = "1.0.0")] impl PartialEq<[B]> for [A] where A: PartialEq, { fn eq(&self, other: &[B]) -> bool { SlicePartialEq::equal(self, other) } fn ne(&self, other: &[B]) -> bool { SlicePartialEq::not_equal(self, other) } } #[stable(feature = "rust1", since = "1.0.0")] impl Eq for [T] {} /// Implements comparison of vectors lexicographically. #[stable(feature = "rust1", since = "1.0.0")] impl Ord for [T] { fn cmp(&self, other: &[T]) -> Ordering { SliceOrd::compare(self, other) } } /// Implements comparison of vectors lexicographically. #[stable(feature = "rust1", since = "1.0.0")] impl PartialOrd for [T] { fn partial_cmp(&self, other: &[T]) -> Option { SlicePartialOrd::partial_compare(self, other) } } #[doc(hidden)] // intermediate trait for specialization of slice's PartialEq trait SlicePartialEq { fn equal(&self, other: &[B]) -> bool; fn not_equal(&self, other: &[B]) -> bool { !self.equal(other) } } // Generic slice equality impl SlicePartialEq for [A] where A: PartialEq, { default fn equal(&self, other: &[B]) -> bool { if self.len() != other.len() { return false; } self.iter().zip(other.iter()).all(|(x, y)| x == y) } } // Use an equal-pointer optimization when types are `Eq` // We can't make `A` and `B` the same type because `min_specialization` won't // allow it. impl SlicePartialEq for [A] where A: MarkerEq, { default fn equal(&self, other: &[B]) -> bool { if self.len() != other.len() { return false; } // While performance would suffer if `guaranteed_eq` just returned `false` // for all arguments, correctness and return value of this function are not affected. if self.as_ptr().guaranteed_eq(other.as_ptr() as *const A) { return true; } self.iter().zip(other.iter()).all(|(x, y)| x == y) } } // Use memcmp for bytewise equality when the types allow impl SlicePartialEq for [A] where A: BytewiseEquality, { fn equal(&self, other: &[B]) -> bool { if self.len() != other.len() { return false; } // While performance would suffer if `guaranteed_eq` just returned `false` // for all arguments, correctness and return value of this function are not affected. if self.as_ptr().guaranteed_eq(other.as_ptr() as *const A) { return true; } // SAFETY: `self` and `other` are references and are thus guaranteed to be valid. // The two slices have been checked to have the same size above. unsafe { let size = mem::size_of_val(self); memcmp(self.as_ptr() as *const u8, other.as_ptr() as *const u8, size) == 0 } } } #[doc(hidden)] // intermediate trait for specialization of slice's PartialOrd trait SlicePartialOrd: Sized { fn partial_compare(left: &[Self], right: &[Self]) -> Option; } impl SlicePartialOrd for A { default fn partial_compare(left: &[A], right: &[A]) -> Option { let l = cmp::min(left.len(), right.len()); // Slice to the loop iteration range to enable bound check // elimination in the compiler let lhs = &left[..l]; let rhs = &right[..l]; for i in 0..l { match lhs[i].partial_cmp(&rhs[i]) { Some(Ordering::Equal) => (), non_eq => return non_eq, } } left.len().partial_cmp(&right.len()) } } // This is the impl that we would like to have. Unfortunately it's not sound. // See `partial_ord_slice.rs`. /* impl SlicePartialOrd for A where A: Ord, { default fn partial_compare(left: &[A], right: &[A]) -> Option { Some(SliceOrd::compare(left, right)) } } */ impl SlicePartialOrd for A { fn partial_compare(left: &[A], right: &[A]) -> Option { Some(SliceOrd::compare(left, right)) } } #[rustc_specialization_trait] trait AlwaysApplicableOrd: SliceOrd + Ord {} macro_rules! always_applicable_ord { ($([$($p:tt)*] $t:ty,)*) => { $(impl<$($p)*> AlwaysApplicableOrd for $t {})* } } always_applicable_ord! { [] u8, [] u16, [] u32, [] u64, [] u128, [] usize, [] i8, [] i16, [] i32, [] i64, [] i128, [] isize, [] bool, [] char, [T: ?Sized] *const T, [T: ?Sized] *mut T, [T: AlwaysApplicableOrd] &T, [T: AlwaysApplicableOrd] &mut T, [T: AlwaysApplicableOrd] Option, } #[doc(hidden)] // intermediate trait for specialization of slice's Ord trait SliceOrd: Sized { fn compare(left: &[Self], right: &[Self]) -> Ordering; } impl SliceOrd for A { default fn compare(left: &[Self], right: &[Self]) -> Ordering { let l = cmp::min(left.len(), right.len()); // Slice to the loop iteration range to enable bound check // elimination in the compiler let lhs = &left[..l]; let rhs = &right[..l]; for i in 0..l { match lhs[i].cmp(&rhs[i]) { Ordering::Equal => (), non_eq => return non_eq, } } left.len().cmp(&right.len()) } } // memcmp compares a sequence of unsigned bytes lexicographically. // this matches the order we want for [u8], but no others (not even [i8]). impl SliceOrd for u8 { #[inline] fn compare(left: &[Self], right: &[Self]) -> Ordering { let order = // SAFETY: `left` and `right` are references and are thus guaranteed to be valid. // We use the minimum of both lengths which guarantees that both regions are // valid for reads in that interval. unsafe { memcmp(left.as_ptr(), right.as_ptr(), cmp::min(left.len(), right.len())) }; if order == 0 { left.len().cmp(&right.len()) } else if order < 0 { Less } else { Greater } } } // Hack to allow specializing on `Eq` even though `Eq` has a method. #[rustc_unsafe_specialization_marker] trait MarkerEq: PartialEq {} impl MarkerEq for T {} #[doc(hidden)] /// Trait implemented for types that can be compared for equality using /// their bytewise representation #[rustc_specialization_trait] trait BytewiseEquality: MarkerEq + Copy {} macro_rules! impl_marker_for { ($traitname:ident, $($ty:ty)*) => { $( impl $traitname<$ty> for $ty { } )* } } impl_marker_for!(BytewiseEquality, u8 i8 u16 i16 u32 i32 u64 i64 u128 i128 usize isize char bool); #[doc(hidden)] #[unstable(feature = "trusted_random_access", issue = "none")] unsafe impl<'a, T> TrustedRandomAccess for Iter<'a, T> { fn may_have_side_effect() -> bool { false } } #[doc(hidden)] #[unstable(feature = "trusted_random_access", issue = "none")] unsafe impl<'a, T> TrustedRandomAccess for IterMut<'a, T> { fn may_have_side_effect() -> bool { false } } trait SliceContains: Sized { fn slice_contains(&self, x: &[Self]) -> bool; } impl SliceContains for T where T: PartialEq, { default fn slice_contains(&self, x: &[Self]) -> bool { x.iter().any(|y| *y == *self) } } impl SliceContains for u8 { fn slice_contains(&self, x: &[Self]) -> bool { memchr::memchr(*self, x).is_some() } } impl SliceContains for i8 { fn slice_contains(&self, x: &[Self]) -> bool { let byte = *self as u8; // SAFETY: `i8` and `u8` have the same memory layout, thus casting `x.as_ptr()` // as `*const u8` is safe. The `x.as_ptr()` comes from a reference and is thus guaranteed // to be valid for reads for the length of the slice `x.len()`, which cannot be larger // than `isize::MAX`. The returned slice is never mutated. let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) }; memchr::memchr(byte, bytes).is_some() } }