1
Fork 0

std: Standardize (input, output) param orderings

This commit swaps the order of arguments to a few functions that previously
took (output, input) parameters, but now take (input, output) parameters (in
that order).

The affected functions are:

* ptr::copy
* ptr::copy_nonoverlapping
* slice::bytes::copy_memory
* intrinsics::copy
* intrinsics::copy_nonoverlapping

Closes #22890
[breaking-change]
This commit is contained in:
Alex Crichton 2015-03-27 11:12:28 -07:00
parent 14192d6df5
commit acd48a2b3e
29 changed files with 147 additions and 107 deletions

View file

@ -301,7 +301,7 @@ mod imp {
libc::realloc(ptr as *mut libc::c_void, size as libc::size_t) as *mut u8 libc::realloc(ptr as *mut libc::c_void, size as libc::size_t) as *mut u8
} else { } else {
let new_ptr = allocate(size, align); let new_ptr = allocate(size, align);
ptr::copy(new_ptr, ptr, cmp::min(size, old_size)); ptr::copy(ptr, new_ptr, cmp::min(size, old_size));
deallocate(ptr, old_size, align); deallocate(ptr, old_size, align);
new_ptr new_ptr
} }

View file

@ -1133,13 +1133,13 @@ impl<K, V> Node<K, V> {
#[inline] #[inline]
unsafe fn insert_kv(&mut self, index: usize, key: K, val: V) -> &mut V { unsafe fn insert_kv(&mut self, index: usize, key: K, val: V) -> &mut V {
ptr::copy( ptr::copy(
self.keys_mut().as_mut_ptr().offset(index as isize + 1),
self.keys().as_ptr().offset(index as isize), self.keys().as_ptr().offset(index as isize),
self.keys_mut().as_mut_ptr().offset(index as isize + 1),
self.len() - index self.len() - index
); );
ptr::copy( ptr::copy(
self.vals_mut().as_mut_ptr().offset(index as isize + 1),
self.vals().as_ptr().offset(index as isize), self.vals().as_ptr().offset(index as isize),
self.vals_mut().as_mut_ptr().offset(index as isize + 1),
self.len() - index self.len() - index
); );
@ -1155,8 +1155,8 @@ impl<K, V> Node<K, V> {
#[inline] #[inline]
unsafe fn insert_edge(&mut self, index: usize, edge: Node<K, V>) { unsafe fn insert_edge(&mut self, index: usize, edge: Node<K, V>) {
ptr::copy( ptr::copy(
self.edges_mut().as_mut_ptr().offset(index as isize + 1),
self.edges().as_ptr().offset(index as isize), self.edges().as_ptr().offset(index as isize),
self.edges_mut().as_mut_ptr().offset(index as isize + 1),
self.len() - index self.len() - index
); );
ptr::write(self.edges_mut().get_unchecked_mut(index), edge); ptr::write(self.edges_mut().get_unchecked_mut(index), edge);
@ -1188,13 +1188,13 @@ impl<K, V> Node<K, V> {
let val = ptr::read(self.vals().get_unchecked(index)); let val = ptr::read(self.vals().get_unchecked(index));
ptr::copy( ptr::copy(
self.keys_mut().as_mut_ptr().offset(index as isize),
self.keys().as_ptr().offset(index as isize + 1), self.keys().as_ptr().offset(index as isize + 1),
self.keys_mut().as_mut_ptr().offset(index as isize),
self.len() - index - 1 self.len() - index - 1
); );
ptr::copy( ptr::copy(
self.vals_mut().as_mut_ptr().offset(index as isize),
self.vals().as_ptr().offset(index as isize + 1), self.vals().as_ptr().offset(index as isize + 1),
self.vals_mut().as_mut_ptr().offset(index as isize),
self.len() - index - 1 self.len() - index - 1
); );
@ -1209,8 +1209,8 @@ impl<K, V> Node<K, V> {
let edge = ptr::read(self.edges().get_unchecked(index)); let edge = ptr::read(self.edges().get_unchecked(index));
ptr::copy( ptr::copy(
self.edges_mut().as_mut_ptr().offset(index as isize),
self.edges().as_ptr().offset(index as isize + 1), self.edges().as_ptr().offset(index as isize + 1),
self.edges_mut().as_mut_ptr().offset(index as isize),
// index can be == len+1, so do the +1 first to avoid underflow. // index can be == len+1, so do the +1 first to avoid underflow.
(self.len() + 1) - index (self.len() + 1) - index
); );
@ -1237,19 +1237,19 @@ impl<K, V> Node<K, V> {
right._len = self.len() / 2; right._len = self.len() / 2;
let right_offset = self.len() - right.len(); let right_offset = self.len() - right.len();
ptr::copy_nonoverlapping( ptr::copy_nonoverlapping(
right.keys_mut().as_mut_ptr(),
self.keys().as_ptr().offset(right_offset as isize), self.keys().as_ptr().offset(right_offset as isize),
right.keys_mut().as_mut_ptr(),
right.len() right.len()
); );
ptr::copy_nonoverlapping( ptr::copy_nonoverlapping(
right.vals_mut().as_mut_ptr(),
self.vals().as_ptr().offset(right_offset as isize), self.vals().as_ptr().offset(right_offset as isize),
right.vals_mut().as_mut_ptr(),
right.len() right.len()
); );
if !self.is_leaf() { if !self.is_leaf() {
ptr::copy_nonoverlapping( ptr::copy_nonoverlapping(
right.edges_mut().as_mut_ptr(),
self.edges().as_ptr().offset(right_offset as isize), self.edges().as_ptr().offset(right_offset as isize),
right.edges_mut().as_mut_ptr(),
right.len() + 1 right.len() + 1
); );
} }
@ -1278,19 +1278,19 @@ impl<K, V> Node<K, V> {
ptr::write(self.vals_mut().get_unchecked_mut(old_len), val); ptr::write(self.vals_mut().get_unchecked_mut(old_len), val);
ptr::copy_nonoverlapping( ptr::copy_nonoverlapping(
self.keys_mut().as_mut_ptr().offset(old_len as isize + 1),
right.keys().as_ptr(), right.keys().as_ptr(),
self.keys_mut().as_mut_ptr().offset(old_len as isize + 1),
right.len() right.len()
); );
ptr::copy_nonoverlapping( ptr::copy_nonoverlapping(
self.vals_mut().as_mut_ptr().offset(old_len as isize + 1),
right.vals().as_ptr(), right.vals().as_ptr(),
self.vals_mut().as_mut_ptr().offset(old_len as isize + 1),
right.len() right.len()
); );
if !self.is_leaf() { if !self.is_leaf() {
ptr::copy_nonoverlapping( ptr::copy_nonoverlapping(
self.edges_mut().as_mut_ptr().offset(old_len as isize + 1),
right.edges().as_ptr(), right.edges().as_ptr(),
self.edges_mut().as_mut_ptr().offset(old_len as isize + 1),
right.len() + 1 right.len() + 1
); );
} }

View file

@ -1320,10 +1320,10 @@ fn insertion_sort<T, F>(v: &mut [T], mut compare: F) where F: FnMut(&T, &T) -> O
if i != j { if i != j {
let tmp = ptr::read(read_ptr); let tmp = ptr::read(read_ptr);
ptr::copy(buf_v.offset(j + 1), ptr::copy(&*buf_v.offset(j),
&*buf_v.offset(j), buf_v.offset(j + 1),
(i - j) as usize); (i - j) as usize);
ptr::copy_nonoverlapping(buf_v.offset(j), &tmp, 1); ptr::copy_nonoverlapping(&tmp, buf_v.offset(j), 1);
mem::forget(tmp); mem::forget(tmp);
} }
} }
@ -1396,10 +1396,10 @@ fn merge_sort<T, F>(v: &mut [T], mut compare: F) where F: FnMut(&T, &T) -> Order
// j + 1 could be `len` (for the last `i`), but in // j + 1 could be `len` (for the last `i`), but in
// that case, `i == j` so we don't copy. The // that case, `i == j` so we don't copy. The
// `.offset(j)` is always in bounds. // `.offset(j)` is always in bounds.
ptr::copy(buf_dat.offset(j + 1), ptr::copy(&*buf_dat.offset(j),
&*buf_dat.offset(j), buf_dat.offset(j + 1),
i - j as usize); i - j as usize);
ptr::copy_nonoverlapping(buf_dat.offset(j), read_ptr, 1); ptr::copy_nonoverlapping(read_ptr, buf_dat.offset(j), 1);
} }
} }
} }
@ -1447,11 +1447,11 @@ fn merge_sort<T, F>(v: &mut [T], mut compare: F) where F: FnMut(&T, &T) -> Order
if left == right_start { if left == right_start {
// the number remaining in this run. // the number remaining in this run.
let elems = (right_end as usize - right as usize) / mem::size_of::<T>(); let elems = (right_end as usize - right as usize) / mem::size_of::<T>();
ptr::copy_nonoverlapping(out, &*right, elems); ptr::copy_nonoverlapping(&*right, out, elems);
break; break;
} else if right == right_end { } else if right == right_end {
let elems = (right_start as usize - left as usize) / mem::size_of::<T>(); let elems = (right_start as usize - left as usize) / mem::size_of::<T>();
ptr::copy_nonoverlapping(out, &*left, elems); ptr::copy_nonoverlapping(&*left, out, elems);
break; break;
} }
@ -1465,7 +1465,7 @@ fn merge_sort<T, F>(v: &mut [T], mut compare: F) where F: FnMut(&T, &T) -> Order
} else { } else {
step(&mut left) step(&mut left)
}; };
ptr::copy_nonoverlapping(out, &*to_copy, 1); ptr::copy_nonoverlapping(&*to_copy, out, 1);
step(&mut out); step(&mut out);
} }
} }
@ -1479,7 +1479,7 @@ fn merge_sort<T, F>(v: &mut [T], mut compare: F) where F: FnMut(&T, &T) -> Order
// write the result to `v` in one go, so that there are never two copies // write the result to `v` in one go, so that there are never two copies
// of the same object in `v`. // of the same object in `v`.
unsafe { unsafe {
ptr::copy_nonoverlapping(v.as_mut_ptr(), &*buf_dat, len); ptr::copy_nonoverlapping(&*buf_dat, v.as_mut_ptr(), len);
} }
// increment the pointer, returning the old pointer. // increment the pointer, returning the old pointer.

View file

@ -592,8 +592,8 @@ impl String {
let ch = self.char_at(idx); let ch = self.char_at(idx);
let next = idx + ch.len_utf8(); let next = idx + ch.len_utf8();
unsafe { unsafe {
ptr::copy(self.vec.as_mut_ptr().offset(idx as isize), ptr::copy(self.vec.as_ptr().offset(next as isize),
self.vec.as_ptr().offset(next as isize), self.vec.as_mut_ptr().offset(idx as isize),
len - next); len - next);
self.vec.set_len(len - (next - idx)); self.vec.set_len(len - (next - idx));
} }
@ -622,11 +622,11 @@ impl String {
let amt = ch.encode_utf8(&mut bits).unwrap(); let amt = ch.encode_utf8(&mut bits).unwrap();
unsafe { unsafe {
ptr::copy(self.vec.as_mut_ptr().offset((idx + amt) as isize), ptr::copy(self.vec.as_ptr().offset(idx as isize),
self.vec.as_ptr().offset(idx as isize), self.vec.as_mut_ptr().offset((idx + amt) as isize),
len - idx); len - idx);
ptr::copy(self.vec.as_mut_ptr().offset(idx as isize), ptr::copy(bits.as_ptr(),
bits.as_ptr(), self.vec.as_mut_ptr().offset(idx as isize),
amt); amt);
self.vec.set_len(len + amt); self.vec.set_len(len + amt);
} }

View file

@ -260,16 +260,17 @@ impl<T> Vec<T> {
/// Creates a vector by copying the elements from a raw pointer. /// Creates a vector by copying the elements from a raw pointer.
/// ///
/// This function will copy `elts` contiguous elements starting at `ptr` into a new allocation /// This function will copy `elts` contiguous elements starting at `ptr`
/// owned by the returned `Vec<T>`. The elements of the buffer are copied into the vector /// into a new allocation owned by the returned `Vec<T>`. The elements of
/// without cloning, as if `ptr::read()` were called on them. /// the buffer are copied into the vector without cloning, as if
/// `ptr::read()` were called on them.
#[inline] #[inline]
#[unstable(feature = "collections", #[unstable(feature = "collections",
reason = "may be better expressed via composition")] reason = "may be better expressed via composition")]
pub unsafe fn from_raw_buf(ptr: *const T, elts: usize) -> Vec<T> { pub unsafe fn from_raw_buf(ptr: *const T, elts: usize) -> Vec<T> {
let mut dst = Vec::with_capacity(elts); let mut dst = Vec::with_capacity(elts);
dst.set_len(elts); dst.set_len(elts);
ptr::copy_nonoverlapping(dst.as_mut_ptr(), ptr, elts); ptr::copy_nonoverlapping(ptr, dst.as_mut_ptr(), elts);
dst dst
} }
@ -288,8 +289,9 @@ impl<T> Vec<T> {
self.cap self.cap
} }
/// Reserves capacity for at least `additional` more elements to be inserted in the given /// Reserves capacity for at least `additional` more elements to be inserted
/// `Vec<T>`. The collection may reserve more space to avoid frequent reallocations. /// in the given `Vec<T>`. The collection may reserve more space to avoid
/// frequent reallocations.
/// ///
/// # Panics /// # Panics
/// ///
@ -541,7 +543,7 @@ impl<T> Vec<T> {
let p = self.as_mut_ptr().offset(index as isize); let p = self.as_mut_ptr().offset(index as isize);
// Shift everything over to make space. (Duplicating the // Shift everything over to make space. (Duplicating the
// `index`th element into two consecutive places.) // `index`th element into two consecutive places.)
ptr::copy(p.offset(1), &*p, len - index); ptr::copy(&*p, p.offset(1), len - index);
// Write it in, overwriting the first copy of the `index`th // Write it in, overwriting the first copy of the `index`th
// element. // element.
ptr::write(&mut *p, element); ptr::write(&mut *p, element);
@ -579,7 +581,7 @@ impl<T> Vec<T> {
ret = ptr::read(ptr); ret = ptr::read(ptr);
// Shift everything down to fill in that spot. // Shift everything down to fill in that spot.
ptr::copy(ptr, &*ptr.offset(1), len - index - 1); ptr::copy(&*ptr.offset(1), ptr, len - index - 1);
} }
self.set_len(len - 1); self.set_len(len - 1);
ret ret
@ -721,8 +723,8 @@ impl<T> Vec<T> {
let len = self.len(); let len = self.len();
unsafe { unsafe {
ptr::copy_nonoverlapping( ptr::copy_nonoverlapping(
self.get_unchecked_mut(len),
other.as_ptr(), other.as_ptr(),
self.get_unchecked_mut(len),
other.len()); other.len());
} }
@ -1042,8 +1044,8 @@ impl<T> Vec<T> {
other.set_len(other_len); other.set_len(other_len);
ptr::copy_nonoverlapping( ptr::copy_nonoverlapping(
other.as_mut_ptr(),
self.as_ptr().offset(at as isize), self.as_ptr().offset(at as isize),
other.as_mut_ptr(),
other.len()); other.len());
} }
other other

View file

@ -142,8 +142,8 @@ impl<T> VecDeque<T> {
debug_assert!(src + len <= self.cap, "dst={} src={} len={} cap={}", dst, src, len, debug_assert!(src + len <= self.cap, "dst={} src={} len={} cap={}", dst, src, len,
self.cap); self.cap);
ptr::copy( ptr::copy(
self.ptr.offset(dst as isize),
self.ptr.offset(src as isize), self.ptr.offset(src as isize),
self.ptr.offset(dst as isize),
len); len);
} }
@ -155,8 +155,8 @@ impl<T> VecDeque<T> {
debug_assert!(src + len <= self.cap, "dst={} src={} len={} cap={}", dst, src, len, debug_assert!(src + len <= self.cap, "dst={} src={} len={} cap={}", dst, src, len,
self.cap); self.cap);
ptr::copy_nonoverlapping( ptr::copy_nonoverlapping(
self.ptr.offset(dst as isize),
self.ptr.offset(src as isize), self.ptr.offset(src as isize),
self.ptr.offset(dst as isize),
len); len);
} }
} }
@ -1361,21 +1361,21 @@ impl<T> VecDeque<T> {
// `at` lies in the first half. // `at` lies in the first half.
let amount_in_first = first_len - at; let amount_in_first = first_len - at;
ptr::copy_nonoverlapping(*other.ptr, ptr::copy_nonoverlapping(first_half.as_ptr().offset(at as isize),
first_half.as_ptr().offset(at as isize), *other.ptr,
amount_in_first); amount_in_first);
// just take all of the second half. // just take all of the second half.
ptr::copy_nonoverlapping(other.ptr.offset(amount_in_first as isize), ptr::copy_nonoverlapping(second_half.as_ptr(),
second_half.as_ptr(), other.ptr.offset(amount_in_first as isize),
second_len); second_len);
} else { } else {
// `at` lies in the second half, need to factor in the elements we skipped // `at` lies in the second half, need to factor in the elements we skipped
// in the first half. // in the first half.
let offset = at - first_len; let offset = at - first_len;
let amount_in_second = second_len - offset; let amount_in_second = second_len - offset;
ptr::copy_nonoverlapping(*other.ptr, ptr::copy_nonoverlapping(second_half.as_ptr().offset(offset as isize),
second_half.as_ptr().offset(offset as isize), *other.ptr,
amount_in_second); amount_in_second);
} }
} }

View file

@ -316,8 +316,8 @@ pub fn float_to_str_bytes_common<T: Float, U, F>(
impl<'a> fmt::Write for Filler<'a> { impl<'a> fmt::Write for Filler<'a> {
fn write_str(&mut self, s: &str) -> fmt::Result { fn write_str(&mut self, s: &str) -> fmt::Result {
slice::bytes::copy_memory(&mut self.buf[(*self.end)..], slice::bytes::copy_memory(s.as_bytes(),
s.as_bytes()); &mut self.buf[(*self.end)..]);
*self.end += s.len(); *self.end += s.len();
Ok(()) Ok(())
} }

View file

@ -293,9 +293,9 @@ extern "rust-intrinsic" {
/// let mut t: T = mem::uninitialized(); /// let mut t: T = mem::uninitialized();
/// ///
/// // Perform the swap, `&mut` pointers never alias /// // Perform the swap, `&mut` pointers never alias
/// ptr::copy_nonoverlapping(&mut t, &*x, 1); /// ptr::copy_nonoverlapping(x, &mut t, 1);
/// ptr::copy_nonoverlapping(x, &*y, 1); /// ptr::copy_nonoverlapping(y, x, 1);
/// ptr::copy_nonoverlapping(y, &t, 1); /// ptr::copy_nonoverlapping(&t, y, 1);
/// ///
/// // y and t now point to the same thing, but we need to completely forget `tmp` /// // y and t now point to the same thing, but we need to completely forget `tmp`
/// // because it's no longer relevant. /// // because it's no longer relevant.
@ -304,6 +304,12 @@ extern "rust-intrinsic" {
/// } /// }
/// ``` /// ```
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
#[cfg(not(stage0))]
pub fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize);
/// dox
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg(stage0)]
pub fn copy_nonoverlapping<T>(dst: *mut T, src: *const T, count: usize); pub fn copy_nonoverlapping<T>(dst: *mut T, src: *const T, count: usize);
/// Copies `count * size_of<T>` bytes from `src` to `dst`. The source /// Copies `count * size_of<T>` bytes from `src` to `dst`. The source
@ -329,12 +335,18 @@ extern "rust-intrinsic" {
/// unsafe fn from_buf_raw<T>(ptr: *const T, elts: usize) -> Vec<T> { /// unsafe fn from_buf_raw<T>(ptr: *const T, elts: usize) -> Vec<T> {
/// let mut dst = Vec::with_capacity(elts); /// let mut dst = Vec::with_capacity(elts);
/// dst.set_len(elts); /// dst.set_len(elts);
/// ptr::copy(dst.as_mut_ptr(), ptr, elts); /// ptr::copy(ptr, dst.as_mut_ptr(), elts);
/// dst /// dst
/// } /// }
/// ``` /// ```
/// ///
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
#[cfg(not(stage0))]
pub fn copy<T>(src: *const T, dst: *mut T, count: usize);
/// dox
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg(stage0)]
pub fn copy<T>(dst: *mut T, src: *const T, count: usize); pub fn copy<T>(dst: *mut T, src: *const T, count: usize);
/// Invokes memset on the specified pointer, setting `count * size_of::<T>()` /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`

View file

@ -229,9 +229,9 @@ pub fn swap<T>(x: &mut T, y: &mut T) {
let mut t: T = uninitialized(); let mut t: T = uninitialized();
// Perform the swap, `&mut` pointers never alias // Perform the swap, `&mut` pointers never alias
ptr::copy_nonoverlapping(&mut t, &*x, 1); ptr::copy_nonoverlapping(&*x, &mut t, 1);
ptr::copy_nonoverlapping(x, &*y, 1); ptr::copy_nonoverlapping(&*y, x, 1);
ptr::copy_nonoverlapping(y, &t, 1); ptr::copy_nonoverlapping(&t, y, 1);
// y and t now point to the same thing, but we need to completely forget `t` // y and t now point to the same thing, but we need to completely forget `t`
// because it's no longer relevant. // because it's no longer relevant.

View file

@ -104,11 +104,28 @@ use cmp::Ordering::{self, Less, Equal, Greater};
// FIXME #19649: intrinsic docs don't render, so these have no docs :( // FIXME #19649: intrinsic docs don't render, so these have no docs :(
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
#[cfg(not(stage0))]
pub use intrinsics::copy_nonoverlapping; pub use intrinsics::copy_nonoverlapping;
/// dox
#[cfg(stage0)]
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize) {
intrinsics::copy_nonoverlapping(dst, src, count)
}
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg(not(stage0))]
pub use intrinsics::copy; pub use intrinsics::copy;
/// dox
#[cfg(stage0)]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn copy<T>(src: *const T, dst: *mut T, count: usize) {
intrinsics::copy(dst, src, count)
}
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub use intrinsics::write_bytes; pub use intrinsics::write_bytes;
@ -167,12 +184,11 @@ pub unsafe fn zero_memory<T>(dst: *mut T, count: usize) {
pub unsafe fn swap<T>(x: *mut T, y: *mut T) { pub unsafe fn swap<T>(x: *mut T, y: *mut T) {
// Give ourselves some scratch space to work with // Give ourselves some scratch space to work with
let mut tmp: T = mem::uninitialized(); let mut tmp: T = mem::uninitialized();
let t: *mut T = &mut tmp;
// Perform the swap // Perform the swap
copy_nonoverlapping(t, &*x, 1); copy_nonoverlapping(x, &mut tmp, 1);
copy(x, &*y, 1); // `x` and `y` may overlap copy(y, x, 1); // `x` and `y` may overlap
copy_nonoverlapping(y, &*t, 1); copy_nonoverlapping(&tmp, y, 1);
// y and t now point to the same thing, but we need to completely forget `tmp` // y and t now point to the same thing, but we need to completely forget `tmp`
// because it's no longer relevant. // because it's no longer relevant.
@ -208,7 +224,7 @@ pub unsafe fn replace<T>(dest: *mut T, mut src: T) -> T {
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn read<T>(src: *const T) -> T { pub unsafe fn read<T>(src: *const T) -> T {
let mut tmp: T = mem::uninitialized(); let mut tmp: T = mem::uninitialized();
copy_nonoverlapping(&mut tmp, src, 1); copy_nonoverlapping(src, &mut tmp, 1);
tmp tmp
} }

View file

@ -1577,14 +1577,14 @@ pub mod bytes {
/// ///
/// Panics if the length of `dst` is less than the length of `src`. /// Panics if the length of `dst` is less than the length of `src`.
#[inline] #[inline]
pub fn copy_memory(dst: &mut [u8], src: &[u8]) { pub fn copy_memory(src: &[u8], dst: &mut [u8]) {
let len_src = src.len(); let len_src = src.len();
assert!(dst.len() >= len_src); assert!(dst.len() >= len_src);
// `dst` is unaliasable, so we know statically it doesn't overlap // `dst` is unaliasable, so we know statically it doesn't overlap
// with `src`. // with `src`.
unsafe { unsafe {
ptr::copy_nonoverlapping(dst.as_mut_ptr(), ptr::copy_nonoverlapping(src.as_ptr(),
src.as_ptr(), dst.as_mut_ptr(),
len_src); len_src);
} }
} }

View file

@ -35,18 +35,15 @@ fn test() {
let v0 = vec![32000u16, 32001u16, 32002u16]; let v0 = vec![32000u16, 32001u16, 32002u16];
let mut v1 = vec![0u16, 0u16, 0u16]; let mut v1 = vec![0u16, 0u16, 0u16];
copy(v1.as_mut_ptr().offset(1), copy(v0.as_ptr().offset(1), v1.as_mut_ptr().offset(1), 1);
v0.as_ptr().offset(1), 1);
assert!((v1[0] == 0u16 && assert!((v1[0] == 0u16 &&
v1[1] == 32001u16 && v1[1] == 32001u16 &&
v1[2] == 0u16)); v1[2] == 0u16));
copy(v1.as_mut_ptr(), copy(v0.as_ptr().offset(2), v1.as_mut_ptr(), 1);
v0.as_ptr().offset(2), 1);
assert!((v1[0] == 32002u16 && assert!((v1[0] == 32002u16 &&
v1[1] == 32001u16 && v1[1] == 32001u16 &&
v1[2] == 0u16)); v1[2] == 0u16));
copy(v1.as_mut_ptr().offset(2), copy(v0.as_ptr(), v1.as_mut_ptr().offset(2), 1);
v0.as_ptr(), 1);
assert!((v1[0] == 32002u16 && assert!((v1[0] == 32002u16 &&
v1[1] == 32001u16 && v1[1] == 32001u16 &&
v1[2] == 32000u16)); v1[2] == 32000u16));

View file

@ -449,21 +449,21 @@ pub mod reader {
pub fn doc_as_u16(d: Doc) -> u16 { pub fn doc_as_u16(d: Doc) -> u16 {
assert_eq!(d.end, d.start + 2); assert_eq!(d.end, d.start + 2);
let mut b = [0; 2]; let mut b = [0; 2];
bytes::copy_memory(&mut b, &d.data[d.start..d.end]); bytes::copy_memory(&d.data[d.start..d.end], &mut b);
unsafe { (*(b.as_ptr() as *const u16)).to_be() } unsafe { (*(b.as_ptr() as *const u16)).to_be() }
} }
pub fn doc_as_u32(d: Doc) -> u32 { pub fn doc_as_u32(d: Doc) -> u32 {
assert_eq!(d.end, d.start + 4); assert_eq!(d.end, d.start + 4);
let mut b = [0; 4]; let mut b = [0; 4];
bytes::copy_memory(&mut b, &d.data[d.start..d.end]); bytes::copy_memory(&d.data[d.start..d.end], &mut b);
unsafe { (*(b.as_ptr() as *const u32)).to_be() } unsafe { (*(b.as_ptr() as *const u32)).to_be() }
} }
pub fn doc_as_u64(d: Doc) -> u64 { pub fn doc_as_u64(d: Doc) -> u64 {
assert_eq!(d.end, d.start + 8); assert_eq!(d.end, d.start + 8);
let mut b = [0; 8]; let mut b = [0; 8];
bytes::copy_memory(&mut b, &d.data[d.start..d.end]); bytes::copy_memory(&d.data[d.start..d.end], &mut b);
unsafe { (*(b.as_ptr() as *const u64)).to_be() } unsafe { (*(b.as_ptr() as *const u64)).to_be() }
} }
@ -938,7 +938,7 @@ pub mod writer {
{ {
let last_size_pos = last_size_pos as usize; let last_size_pos = last_size_pos as usize;
let data = &self.writer.get_ref()[last_size_pos+4..cur_pos as usize]; let data = &self.writer.get_ref()[last_size_pos+4..cur_pos as usize];
bytes::copy_memory(&mut buf, data); bytes::copy_memory(data, &mut buf);
} }
// overwrite the size and data and continue // overwrite the size and data and continue

View file

@ -62,7 +62,7 @@ pub type Cmd<'a> = &'a crate_metadata;
fn u32_from_be_bytes(bytes: &[u8]) -> u32 { fn u32_from_be_bytes(bytes: &[u8]) -> u32 {
let mut b = [0; 4]; let mut b = [0; 4];
bytes::copy_memory(&mut b, &bytes[..4]); bytes::copy_memory(&bytes[..4], &mut b);
unsafe { (*(b.as_ptr() as *const u32)).to_be() } unsafe { (*(b.as_ptr() as *const u32)).to_be() }
} }

View file

@ -139,15 +139,15 @@ impl FixedBuffer for FixedBuffer64 {
let buffer_remaining = size - self.buffer_idx; let buffer_remaining = size - self.buffer_idx;
if input.len() >= buffer_remaining { if input.len() >= buffer_remaining {
copy_memory( copy_memory(
&mut self.buffer[self.buffer_idx..size], &input[..buffer_remaining],
&input[..buffer_remaining]); &mut self.buffer[self.buffer_idx..size]);
self.buffer_idx = 0; self.buffer_idx = 0;
func(&self.buffer); func(&self.buffer);
i += buffer_remaining; i += buffer_remaining;
} else { } else {
copy_memory( copy_memory(
&mut self.buffer[self.buffer_idx..self.buffer_idx + input.len()], input,
input); &mut self.buffer[self.buffer_idx..self.buffer_idx + input.len()]);
self.buffer_idx += input.len(); self.buffer_idx += input.len();
return; return;
} }
@ -165,8 +165,8 @@ impl FixedBuffer for FixedBuffer64 {
// be empty. // be empty.
let input_remaining = input.len() - i; let input_remaining = input.len() - i;
copy_memory( copy_memory(
&mut self.buffer[..input_remaining], &input[i..],
&input[i..]); &mut self.buffer[..input_remaining]);
self.buffer_idx += input_remaining; self.buffer_idx += input_remaining;
} }

View file

@ -398,8 +398,8 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
false, false,
false, false,
*substs.types.get(FnSpace, 0), *substs.types.get(FnSpace, 0),
llargs[0],
llargs[1], llargs[1],
llargs[0],
llargs[2], llargs[2],
call_debug_location) call_debug_location)
} }
@ -408,8 +408,8 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
true, true,
false, false,
*substs.types.get(FnSpace, 0), *substs.types.get(FnSpace, 0),
llargs[0],
llargs[1], llargs[1],
llargs[0],
llargs[2], llargs[2],
call_debug_location) call_debug_location)
} }

View file

@ -5417,7 +5417,21 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
mutbl: ast::MutImmutable mutbl: ast::MutImmutable
})) }))
} }
"copy" | "copy_nonoverlapping" | "copy" | "copy_nonoverlapping" => {
(1,
vec!(
ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0),
mutbl: ast::MutImmutable
}),
ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0),
mutbl: ast::MutMutable
}),
tcx.types.usize,
),
ty::mk_nil(tcx))
}
"volatile_copy_memory" | "volatile_copy_nonoverlapping_memory" => { "volatile_copy_memory" | "volatile_copy_nonoverlapping_memory" => {
(1, (1,
vec!( vec!(

View file

@ -480,8 +480,8 @@ impl<K, V, M: Deref<Target=RawTable<K, V>>> GapThenFull<K, V, M> {
pub fn shift(mut self) -> Option<GapThenFull<K, V, M>> { pub fn shift(mut self) -> Option<GapThenFull<K, V, M>> {
unsafe { unsafe {
*self.gap.raw.hash = mem::replace(&mut *self.full.raw.hash, EMPTY_BUCKET); *self.gap.raw.hash = mem::replace(&mut *self.full.raw.hash, EMPTY_BUCKET);
ptr::copy_nonoverlapping(self.gap.raw.key, self.full.raw.key, 1); ptr::copy_nonoverlapping(self.full.raw.key, self.gap.raw.key, 1);
ptr::copy_nonoverlapping(self.gap.raw.val, self.full.raw.val, 1); ptr::copy_nonoverlapping(self.full.raw.val, self.gap.raw.val, 1);
} }
let FullBucket { raw: prev_raw, idx: prev_idx, .. } = self.full; let FullBucket { raw: prev_raw, idx: prev_idx, .. } = self.full;

View file

@ -177,8 +177,8 @@ impl<W: Write> BufWriter<W> {
if written > 0 { if written > 0 {
// NB: would be better expressed as .remove(0..n) if it existed // NB: would be better expressed as .remove(0..n) if it existed
unsafe { unsafe {
ptr::copy(self.buf.as_mut_ptr(), ptr::copy(self.buf.as_ptr().offset(written as isize),
self.buf.as_ptr().offset(written as isize), self.buf.as_mut_ptr(),
len - written); len - written);
} }
} }

View file

@ -151,7 +151,7 @@ impl Write for Cursor<Vec<u8>> {
// there (left), and what will be appended on the end (right) // there (left), and what will be appended on the end (right)
let space = self.inner.len() - pos as usize; let space = self.inner.len() - pos as usize;
let (left, right) = buf.split_at(cmp::min(space, buf.len())); let (left, right) = buf.split_at(cmp::min(space, buf.len()));
slice::bytes::copy_memory(&mut self.inner[(pos as usize)..], left); slice::bytes::copy_memory(left, &mut self.inner[(pos as usize)..]);
self.inner.push_all(right); self.inner.push_all(right);
// Bump us forward // Bump us forward

View file

@ -149,7 +149,7 @@ impl<'a> Read for &'a [u8] {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
let amt = cmp::min(buf.len(), self.len()); let amt = cmp::min(buf.len(), self.len());
let (a, b) = self.split_at(amt); let (a, b) = self.split_at(amt);
slice::bytes::copy_memory(buf, a); slice::bytes::copy_memory(a, buf);
*self = b; *self = b;
Ok(amt) Ok(amt)
} }
@ -170,7 +170,7 @@ impl<'a> Write for &'a mut [u8] {
fn write(&mut self, data: &[u8]) -> io::Result<usize> { fn write(&mut self, data: &[u8]) -> io::Result<usize> {
let amt = cmp::min(data.len(), self.len()); let amt = cmp::min(data.len(), self.len());
let (a, b) = mem::replace(self, &mut []).split_at_mut(amt); let (a, b) = mem::replace(self, &mut []).split_at_mut(amt);
slice::bytes::copy_memory(a, &data[..amt]); slice::bytes::copy_memory(&data[..amt], a);
*self = b; *self = b;
Ok(amt) Ok(amt)
} }

View file

@ -118,7 +118,7 @@ impl<R: Reader> Reader for BufferedReader<R> {
let nread = { let nread = {
let available = try!(self.fill_buf()); let available = try!(self.fill_buf());
let nread = cmp::min(available.len(), buf.len()); let nread = cmp::min(available.len(), buf.len());
slice::bytes::copy_memory(buf, &available[..nread]); slice::bytes::copy_memory(&available[..nread], buf);
nread nread
}; };
self.pos += nread; self.pos += nread;
@ -225,7 +225,7 @@ impl<W: Writer> Writer for BufferedWriter<W> {
self.inner.as_mut().unwrap().write_all(buf) self.inner.as_mut().unwrap().write_all(buf)
} else { } else {
let dst = &mut self.buf[self.pos..]; let dst = &mut self.buf[self.pos..];
slice::bytes::copy_memory(dst, buf); slice::bytes::copy_memory(buf, dst);
self.pos += buf.len(); self.pos += buf.len();
Ok(()) Ok(())
} }

View file

@ -91,7 +91,7 @@ impl Reader for ChanReader {
Some(src) => { Some(src) => {
let dst = &mut buf[num_read..]; let dst = &mut buf[num_read..];
let count = cmp::min(src.len(), dst.len()); let count = cmp::min(src.len(), dst.len());
bytes::copy_memory(dst, &src[..count]); bytes::copy_memory(&src[..count], dst);
count count
}, },
None => 0, None => 0,

View file

@ -171,7 +171,7 @@ pub fn u64_from_be_bytes(data: &[u8], start: usize, size: usize) -> u64 {
unsafe { unsafe {
let ptr = data.as_ptr().offset(start as isize); let ptr = data.as_ptr().offset(start as isize);
let out = buf.as_mut_ptr(); let out = buf.as_mut_ptr();
copy_nonoverlapping(out.offset((8 - size) as isize), ptr, size); copy_nonoverlapping(ptr, out.offset((8 - size) as isize), size);
(*(out as *const u64)).to_be() (*(out as *const u64)).to_be()
} }
} }

View file

@ -168,7 +168,7 @@ impl Reader for MemReader {
let input = &self.buf[self.pos.. self.pos + write_len]; let input = &self.buf[self.pos.. self.pos + write_len];
let output = &mut buf[..write_len]; let output = &mut buf[..write_len];
assert_eq!(input.len(), output.len()); assert_eq!(input.len(), output.len());
slice::bytes::copy_memory(output, input); slice::bytes::copy_memory(input, output);
} }
self.pos += write_len; self.pos += write_len;
assert!(self.pos <= self.buf.len()); assert!(self.pos <= self.buf.len());
@ -212,7 +212,7 @@ impl<'a> Reader for &'a [u8] {
{ {
let input = &self[..write_len]; let input = &self[..write_len];
let output = &mut buf[.. write_len]; let output = &mut buf[.. write_len];
slice::bytes::copy_memory(output, input); slice::bytes::copy_memory(input, output);
} }
*self = &self[write_len..]; *self = &self[write_len..];
@ -287,13 +287,13 @@ impl<'a> Writer for BufWriter<'a> {
let src_len = src.len(); let src_len = src.len();
if dst_len >= src_len { if dst_len >= src_len {
slice::bytes::copy_memory(dst, src); slice::bytes::copy_memory(src, dst);
self.pos += src_len; self.pos += src_len;
Ok(()) Ok(())
} else { } else {
slice::bytes::copy_memory(dst, &src[..dst_len]); slice::bytes::copy_memory(&src[..dst_len], dst);
self.pos += dst_len; self.pos += dst_len;
@ -360,7 +360,7 @@ impl<'a> Reader for BufReader<'a> {
let input = &self.buf[self.pos.. self.pos + write_len]; let input = &self.buf[self.pos.. self.pos + write_len];
let output = &mut buf[..write_len]; let output = &mut buf[..write_len];
assert_eq!(input.len(), output.len()); assert_eq!(input.len(), output.len());
slice::bytes::copy_memory(output, input); slice::bytes::copy_memory(input, output);
} }
self.pos += write_len; self.pos += write_len;
assert!(self.pos <= self.buf.len()); assert!(self.pos <= self.buf.len());

View file

@ -344,8 +344,8 @@ impl Wtf8Buf {
Some((surrogate_pos, _)) => { Some((surrogate_pos, _)) => {
pos = surrogate_pos + 3; pos = surrogate_pos + 3;
slice::bytes::copy_memory( slice::bytes::copy_memory(
UTF8_REPLACEMENT_CHARACTER,
&mut self.bytes[surrogate_pos .. pos], &mut self.bytes[surrogate_pos .. pos],
UTF8_REPLACEMENT_CHARACTER
); );
}, },
None => return unsafe { String::from_utf8_unchecked(self.bytes) } None => return unsafe { String::from_utf8_unchecked(self.bytes) }

View file

@ -126,10 +126,9 @@ impl<'a, W: Writer> RepeatFasta<'a, W> {
let mut buf = repeat(0).take(alu_len + LINE_LEN).collect::<Vec<_>>(); let mut buf = repeat(0).take(alu_len + LINE_LEN).collect::<Vec<_>>();
let alu: &[u8] = self.alu.as_bytes(); let alu: &[u8] = self.alu.as_bytes();
copy_memory(&mut buf, alu); copy_memory(alu, &mut buf);
let buf_len = buf.len(); let buf_len = buf.len();
copy_memory(&mut buf[alu_len..buf_len], copy_memory(&alu[..LINE_LEN], &mut buf[alu_len..buf_len]);
&alu[..LINE_LEN]);
let mut pos = 0; let mut pos = 0;
let mut bytes; let mut bytes;

View file

@ -181,8 +181,8 @@ fn reverse_complement(seq: &mut [u8], tables: &Tables) {
let mut i = LINE_LEN; let mut i = LINE_LEN;
while i < len { while i < len {
unsafe { unsafe {
copy(seq.as_mut_ptr().offset((i - off + 1) as isize), copy(seq.as_ptr().offset((i - off) as isize),
seq.as_ptr().offset((i - off) as isize), off); seq.as_mut_ptr().offset((i - off + 1) as isize), off);
*seq.get_unchecked_mut(i - off) = b'\n'; *seq.get_unchecked_mut(i - off) = b'\n';
} }
i += LINE_LEN + 1; i += LINE_LEN + 1;

View file

@ -26,7 +26,7 @@ trait MyWriter {
impl<'a> MyWriter for &'a mut [u8] { impl<'a> MyWriter for &'a mut [u8] {
fn my_write(&mut self, buf: &[u8]) -> IoResult<()> { fn my_write(&mut self, buf: &[u8]) -> IoResult<()> {
slice::bytes::copy_memory(*self, buf); slice::bytes::copy_memory(buf, *self);
let write_len = buf.len(); let write_len = buf.len();
unsafe { unsafe {