
rollup merge of #23668: alexcrichton/io-zero

This commit alters the behavior of the `Read::read_to_end()` method to zero the
buffer it hands out rather than passing uninitialized memory to `read`. This
change is motivated by the [discussion on the internals forum][discuss], where
the conclusion was that the standard library will not expose uninitialized
memory.

[discuss]: http://internals.rust-lang.org/t/uninitialized-memory/1652

Closes #20314
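
For context, the hazard being closed off is that `Read::read` receives a plain `&mut [u8]` and nothing in its contract stops an implementation from reading the buffer before writing to it. A minimal, hypothetical sketch of such a reader (the `PeekingReader` type below is illustrative and not part of this patch):

```rust
use std::io::{self, Read};

// Hypothetical reader that inspects the buffer it is handed before writing
// into it. Nothing in the `Read` contract forbids this, which is why
// `read_to_end` must not hand `read` uninitialized memory.
struct PeekingReader {
    done: bool,
}

impl Read for PeekingReader {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        if self.done || buf.is_empty() {
            return Ok(0);
        }
        // With the zeroing behavior from this commit, this load from the
        // incoming buffer only ever observes 0; against the uninitialized
        // tail of a `Vec` the same load would be undefined behavior.
        assert_eq!(buf[0], 0);
        buf[0] = b'x';
        self.done = true;
        Ok(1)
    }
}

fn main() -> io::Result<()> {
    let mut out = Vec::new();
    PeekingReader { done: false }.read_to_end(&mut out)?;
    assert_eq!(out, [b'x']);
    Ok(())
}
```

With the zeroed buffer introduced here, such a reader only ever observes zeros, so the standard library never leaks uninitialized memory through this path.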
Alex Crichton 2015-03-24 14:50:48 -07:00
commit 5ed8733ea3


@@ -16,13 +16,12 @@ use cmp;
 use unicode::str as core_str;
 use error as std_error;
 use fmt;
-use iter::Iterator;
+use iter::{self, Iterator, IteratorExt, Extend};
 use marker::Sized;
 use ops::{Drop, FnOnce};
 use option::Option::{self, Some, None};
 use result::Result::{Ok, Err};
 use result;
-use slice;
 use string::String;
 use str;
 use vec::Vec;
@@ -50,41 +49,26 @@ mod stdio;
 const DEFAULT_BUF_SIZE: usize = 64 * 1024;
 
 // Acquires a slice of the vector `v` from its length to its capacity
-// (uninitialized data), reads into it, and then updates the length.
+// (after initializing the data), reads into it, and then updates the length.
 //
 // This function is leveraged to efficiently read some bytes into a destination
 // vector without extra copying and taking advantage of the space that's already
 // in `v`.
-//
-// The buffer we're passing down, however, is pointing at uninitialized data
-// (the end of a `Vec`), and many operations will be *much* faster if we don't
-// have to zero it out. In order to prevent LLVM from generating an `undef`
-// value when reads happen from this uninitialized memory, we force LLVM to
-// think it's initialized by sending it through a black box. This should prevent
-// actual undefined behavior after optimizations.
 fn with_end_to_cap<F>(v: &mut Vec<u8>, f: F) -> Result<usize>
     where F: FnOnce(&mut [u8]) -> Result<usize>
 {
-    unsafe {
-        let n = try!(f({
-            let base = v.as_mut_ptr().offset(v.len() as isize);
-            black_box(slice::from_raw_parts_mut(base,
-                                                v.capacity() - v.len()))
-        }));
-
-        // If the closure (typically a `read` implementation) reported that it
-        // read a larger number of bytes than the vector actually has, we need
-        // to be sure to clamp the vector to at most its capacity.
-        let new_len = cmp::min(v.capacity(), v.len() + n);
-        v.set_len(new_len);
-        return Ok(n);
-    }
-
-    // Semi-hack used to prevent LLVM from retaining any assumptions about
-    // `dummy` over this function call
-    unsafe fn black_box<T>(mut dummy: T) -> T {
-        asm!("" :: "r"(&mut dummy) : "memory");
-        dummy
-    }
+    let len = v.len();
+    let new_area = v.capacity() - len;
+    v.extend(iter::repeat(0).take(new_area));
+    match f(&mut v[len..]) {
+        Ok(n) => {
+            v.truncate(len + n);
+            Ok(n)
+        }
+        Err(e) => {
+            v.truncate(len);
+            Err(e)
+        }
+    }
 }
 
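
For reference, the same zero-fill-then-truncate shape as the new `with_end_to_cap`, written as a standalone helper over any `Read` (the name `read_to_cap` and the surrounding `main` are mine, for illustration only, not part of the patch):

```rust
use std::io::{self, Read};
use std::iter;

// Zero `v`'s spare capacity, let the reader fill part of it, then truncate
// back so only the bytes actually read remain.
fn read_to_cap<R: Read>(r: &mut R, v: &mut Vec<u8>) -> io::Result<usize> {
    let len = v.len();
    let spare = v.capacity() - len;
    // Initialize the spare capacity with zeros rather than exposing
    // uninitialized memory to `r.read`.
    v.extend(iter::repeat(0u8).take(spare));
    match r.read(&mut v[len..]) {
        Ok(n) => {
            // Keep only the bytes the reader actually wrote.
            v.truncate(len + n);
            Ok(n)
        }
        Err(e) => {
            // On error, roll the vector back to its original length.
            v.truncate(len);
            Err(e)
        }
    }
}

fn main() -> io::Result<()> {
    let mut data: &[u8] = b"hello world";
    let mut buf = Vec::with_capacity(8);
    let n = read_to_cap(&mut data, &mut buf)?;
    assert_eq!(&buf[..], &b"hello world"[..n]);
    Ok(())
}
```

Compared with the removed `unsafe` version, the cost is zeroing the spare capacity up front, which is the trade-off the commit message describes.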