Auto merge of #93138 - matthiaskrgr:rollup-m8akifd, r=matthiaskrgr
Rollup of 17 pull requests

Successful merges:

- #91032 (Introduce drop range tracking to generator interior analysis)
- #92856 (Exclude "test" from doc_auto_cfg)
- #92860 (Fix errors on blanket impls by ignoring the children of generated impls)
- #93038 (Fix star handling in block doc comments)
- #93061 (Only suggest adding `!` to expressions that can be macro invocation)
- #93067 (rustdoc mobile: fix scroll offset when jumping to internal id)
- #93086 (Add tests to ensure that `let_chains` works with `if_let_guard`)
- #93087 (Fix src/test/run-make/raw-dylib-alt-calling-convention)
- #93091 (⬆ chalk to 0.76.0)
- #93094 (src/test/rustdoc-json: Check for `struct_field`s in `variant_tuple_struct.rs`)
- #93098 (Show a more informative panic message when `DefPathHash` does not exist)
- #93099 (rustdoc: auto create output directory when "--output-format json")
- #93102 (Pretty printer algorithm revamp step 3)
- #93104 (Support --bless for pp-exact pretty printer tests)
- #93114 (update comment for `ensure_monomorphic_enough`)
- #93128 (Add script to prevent point releases with same number as existing ones)
- #93136 (Backport the 1.58.1 release notes to master)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
commit 523be2e05d

62 changed files with 1925 additions and 339 deletions
@@ -137,7 +137,7 @@ mod ring;
use ring::RingBuffer;
use std::borrow::Cow;
use std::collections::VecDeque;
use std::fmt;
use std::iter;

/// How to break. Described in more detail in the module docs.
#[derive(Clone, Copy, PartialEq)]
@@ -175,27 +175,10 @@ impl Token {
    }
}

impl fmt::Display for Token {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            Token::String(ref s) => write!(f, "STR({},{})", s, s.len()),
            Token::Break(_) => f.write_str("BREAK"),
            Token::Begin(_) => f.write_str("BEGIN"),
            Token::End => f.write_str("END"),
        }
    }
}

#[derive(Copy, Clone)]
enum PrintStackBreak {
enum PrintFrame {
    Fits,
    Broken(Breaks),
}

#[derive(Copy, Clone)]
struct PrintStackElem {
    offset: isize,
    pbreak: PrintStackBreak,
    Broken { offset: isize, breaks: Breaks },
}

const SIZE_INFINITY: isize = 0xffff;
@@ -220,7 +203,7 @@ pub struct Printer {
    /// advancing.
    scan_stack: VecDeque<usize>,
    /// Stack of blocks-in-progress being flushed by print
    print_stack: Vec<PrintStackElem>,
    print_stack: Vec<PrintFrame>,
    /// Buffered indentation to avoid writing trailing whitespace
    pending_indentation: isize,
    /// The token most recently popped from the left boundary of the
@@ -260,8 +243,8 @@ impl Printer {
    }

    /// Be very careful with this!
    pub fn replace_last_token_still_buffered(&mut self, t: Token) {
        self.buf.last_mut().unwrap().token = t;
    pub fn replace_last_token_still_buffered(&mut self, token: Token) {
        self.buf.last_mut().unwrap().token = token;
    }

    fn scan_eof(&mut self) {
@@ -271,14 +254,14 @@ impl Printer {
        }
    }

    fn scan_begin(&mut self, b: BeginToken) {
    fn scan_begin(&mut self, token: BeginToken) {
        if self.scan_stack.is_empty() {
            self.left_total = 1;
            self.right_total = 1;
            self.buf.clear();
        }
        let right = self.buf.push(BufEntry { token: Token::Begin(b), size: -self.right_total });
        self.scan_stack.push_front(right);
        let right = self.buf.push(BufEntry { token: Token::Begin(token), size: -self.right_total });
        self.scan_stack.push_back(right);
    }

    fn scan_end(&mut self) {
@@ -286,11 +269,11 @@ impl Printer {
            self.print_end();
        } else {
            let right = self.buf.push(BufEntry { token: Token::End, size: -1 });
            self.scan_stack.push_front(right);
            self.scan_stack.push_back(right);
        }
    }

    fn scan_break(&mut self, b: BreakToken) {
    fn scan_break(&mut self, token: BreakToken) {
        if self.scan_stack.is_empty() {
            self.left_total = 1;
            self.right_total = 1;
@@ -298,17 +281,17 @@ impl Printer {
        } else {
            self.check_stack(0);
        }
        let right = self.buf.push(BufEntry { token: Token::Break(b), size: -self.right_total });
        self.scan_stack.push_front(right);
        self.right_total += b.blank_space;
        let right = self.buf.push(BufEntry { token: Token::Break(token), size: -self.right_total });
        self.scan_stack.push_back(right);
        self.right_total += token.blank_space;
    }

    fn scan_string(&mut self, s: Cow<'static, str>) {
    fn scan_string(&mut self, string: Cow<'static, str>) {
        if self.scan_stack.is_empty() {
            self.print_string(&s);
            self.print_string(&string);
        } else {
            let len = s.len() as isize;
            self.buf.push(BufEntry { token: Token::String(s), size: len });
            let len = string.len() as isize;
            self.buf.push(BufEntry { token: Token::String(string), size: len });
            self.right_total += len;
            self.check_stream();
        }
@@ -316,8 +299,8 @@ impl Printer {

    fn check_stream(&mut self) {
        while self.right_total - self.left_total > self.space {
            if *self.scan_stack.back().unwrap() == self.buf.index_of_first() {
                self.scan_stack.pop_back().unwrap();
            if *self.scan_stack.front().unwrap() == self.buf.index_of_first() {
                self.scan_stack.pop_front().unwrap();
                self.buf.first_mut().unwrap().size = SIZE_INFINITY;
            }
            self.advance_left();
@@ -328,56 +311,52 @@ impl Printer {
    }

    fn advance_left(&mut self) {
        let mut left_size = self.buf.first().unwrap().size;
        while self.buf.first().unwrap().size >= 0 {
            let left = self.buf.pop_first().unwrap();

        while left_size >= 0 {
            let left = self.buf.first().unwrap().token.clone();

            let len = match left {
                Token::Break(b) => b.blank_space,
                Token::String(ref s) => {
                    let len = s.len() as isize;
                    assert_eq!(len, left_size);
                    len
            match &left.token {
                Token::String(string) => {
                    self.left_total += string.len() as isize;
                    self.print_string(string);
                }
                _ => 0,
            };
                Token::Break(token) => {
                    self.left_total += token.blank_space;
                    self.print_break(*token, left.size);
                }
                Token::Begin(token) => self.print_begin(*token, left.size),
                Token::End => self.print_end(),
            }

            self.print(left, left_size);
            self.last_printed = Some(left.token);

            self.left_total += len;

            self.buf.advance_left();
            if self.buf.is_empty() {
                break;
            }

            left_size = self.buf.first().unwrap().size;
        }
    }

    fn check_stack(&mut self, mut k: usize) {
        while let Some(&x) = self.scan_stack.front() {
            let mut entry = &mut self.buf[x];
    fn check_stack(&mut self, mut depth: usize) {
        while let Some(&index) = self.scan_stack.back() {
            let mut entry = &mut self.buf[index];
            match entry.token {
                Token::Begin(_) => {
                    if k == 0 {
                    if depth == 0 {
                        break;
                    }
                    self.scan_stack.pop_front().unwrap();
                    self.scan_stack.pop_back().unwrap();
                    entry.size += self.right_total;
                    k -= 1;
                    depth -= 1;
                }
                Token::End => {
                    // paper says + not =, but that makes no sense.
                    self.scan_stack.pop_front().unwrap();
                    self.scan_stack.pop_back().unwrap();
                    entry.size = 1;
                    k += 1;
                    depth += 1;
                }
                _ => {
                    self.scan_stack.pop_front().unwrap();
                    self.scan_stack.pop_back().unwrap();
                    entry.size += self.right_total;
                    if k == 0 {
                    if depth == 0 {
                        break;
                    }
                }
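The rewritten `advance_left` above drains the buffer for as long as the front entry's size has been resolved; in the printer's buffer a negative size marks an entry whose final width is not yet known. A self-contained sketch of that draining pattern over a plain `VecDeque`, with a hypothetical `Entry` type and the emission step reduced to string concatenation:

```rust
use std::collections::VecDeque;

struct Entry {
    text: &'static str,
    size: isize, // size < 0 means "not yet measured", mirroring BufEntry
}

fn drain_ready(buf: &mut VecDeque<Entry>, out: &mut String) {
    // Keep popping while the front entry's size is known; stop at the first
    // unmeasured entry (or when the buffer is empty), like advance_left.
    while buf.front().map_or(false, |entry| entry.size >= 0) {
        let entry = buf.pop_front().unwrap();
        out.push_str(entry.text); // stand-in for print_begin/print_break/print_string
    }
}

fn main() {
    let mut buf = VecDeque::from([
        Entry { text: "fn ", size: 3 },
        Entry { text: "main", size: 4 },
        Entry { text: "(...)", size: -1 }, // still unmeasured, draining stops here
    ]);
    let mut out = String::new();
    drain_ready(&mut buf, &mut out);
    assert_eq!(out, "fn main");
    assert_eq!(buf.len(), 1);
}
```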
@@ -385,29 +364,19 @@ impl Printer {
        }
    }

    fn print_newline(&mut self, amount: isize) {
        self.out.push('\n');
        self.pending_indentation = 0;
        self.indent(amount);
    fn get_top(&self) -> PrintFrame {
        *self
            .print_stack
            .last()
            .unwrap_or(&PrintFrame::Broken { offset: 0, breaks: Breaks::Inconsistent })
    }

    fn indent(&mut self, amount: isize) {
        self.pending_indentation += amount;
    }

    fn get_top(&self) -> PrintStackElem {
        *self.print_stack.last().unwrap_or({
            &PrintStackElem { offset: 0, pbreak: PrintStackBreak::Broken(Breaks::Inconsistent) }
        })
    }

    fn print_begin(&mut self, b: BeginToken, l: isize) {
        if l > self.space {
            let col = self.margin - self.space + b.offset;
            self.print_stack
                .push(PrintStackElem { offset: col, pbreak: PrintStackBreak::Broken(b.breaks) });
    fn print_begin(&mut self, token: BeginToken, size: isize) {
        if size > self.space {
            let col = self.margin - self.space + token.offset;
            self.print_stack.push(PrintFrame::Broken { offset: col, breaks: token.breaks });
        } else {
            self.print_stack.push(PrintStackElem { offset: 0, pbreak: PrintStackBreak::Fits });
            self.print_stack.push(PrintFrame::Fits);
        }
    }

@@ -415,34 +384,26 @@ impl Printer {
        self.print_stack.pop().unwrap();
    }

    fn print_break(&mut self, b: BreakToken, l: isize) {
        let top = self.get_top();
        match top.pbreak {
            PrintStackBreak::Fits => {
                self.space -= b.blank_space;
                self.indent(b.blank_space);
            }
            PrintStackBreak::Broken(Breaks::Consistent) => {
                self.print_newline(top.offset + b.offset);
                self.space = self.margin - (top.offset + b.offset);
            }
            PrintStackBreak::Broken(Breaks::Inconsistent) => {
                if l > self.space {
                    self.print_newline(top.offset + b.offset);
                    self.space = self.margin - (top.offset + b.offset);
                } else {
                    self.indent(b.blank_space);
                    self.space -= b.blank_space;
    fn print_break(&mut self, token: BreakToken, size: isize) {
        let break_offset =
            match self.get_top() {
                PrintFrame::Fits => None,
                PrintFrame::Broken { offset, breaks: Breaks::Consistent } => Some(offset),
                PrintFrame::Broken { offset, breaks: Breaks::Inconsistent } => {
                    if size > self.space { Some(offset) } else { None }
                }
            }
        };
        if let Some(offset) = break_offset {
            self.out.push('\n');
            self.pending_indentation = offset + token.offset;
            self.space = self.margin - (offset + token.offset);
        } else {
            self.pending_indentation += token.blank_space;
            self.space -= token.blank_space;
        }
    }

    fn print_string(&mut self, s: &str) {
        let len = s.len() as isize;
        // assert!(len <= space);
        self.space -= len;

    fn print_string(&mut self, string: &str) {
        // Write the pending indent. A more concise way of doing this would be:
        //
        // write!(self.out, "{: >n$}", "", n = self.pending_indentation as usize)?;
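In the new `print_break` above, a single `break_offset` value captures the whole decision: a consistent block always breaks, while an inconsistent block breaks only if the next `size` no longer fits in the remaining `space`. The same decision, extracted into a standalone sketch (the types are re-declared here only so the snippet compiles on its own):

```rust
#[derive(Copy, Clone)]
enum Breaks {
    Consistent,
    Inconsistent,
}

#[derive(Copy, Clone)]
enum PrintFrame {
    Fits,
    Broken { offset: isize, breaks: Breaks },
}

/// Some(indent) means the break becomes a newline; None means it stays a space.
fn break_offset(frame: PrintFrame, size: isize, space: isize) -> Option<isize> {
    match frame {
        PrintFrame::Fits => None,
        PrintFrame::Broken { offset, breaks: Breaks::Consistent } => Some(offset),
        PrintFrame::Broken { offset, breaks: Breaks::Inconsistent } => {
            if size > space { Some(offset) } else { None }
        }
    }
}

fn main() {
    // Inside a block that fits, breaks never turn into newlines.
    assert_eq!(break_offset(PrintFrame::Fits, 99, 10), None);
    // A consistent block breaks even when the next token would still fit.
    assert_eq!(break_offset(PrintFrame::Broken { offset: 4, breaks: Breaks::Consistent }, 3, 10), Some(4));
    // An inconsistent block breaks only when the token would overflow the line.
    assert_eq!(break_offset(PrintFrame::Broken { offset: 4, breaks: Breaks::Inconsistent }, 3, 10), None);
}
```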
@@ -450,30 +411,18 @@ impl Printer {
        // But that is significantly slower. This code is sufficiently hot, and indents can get
        // sufficiently large, that the difference is significant on some workloads.
        self.out.reserve(self.pending_indentation as usize);
        self.out.extend(std::iter::repeat(' ').take(self.pending_indentation as usize));
        self.out.extend(iter::repeat(' ').take(self.pending_indentation as usize));
        self.pending_indentation = 0;
        self.out.push_str(s);
    }

    fn print(&mut self, token: Token, l: isize) {
        match &token {
            Token::Begin(b) => self.print_begin(*b, l),
            Token::End => self.print_end(),
            Token::Break(b) => self.print_break(*b, l),
            Token::String(s) => {
                let len = s.len() as isize;
                assert_eq!(len, l);
                self.print_string(s);
            }
        }
        self.last_printed = Some(token);
        self.out.push_str(string);
        self.space -= string.len() as isize;
    }

    // Convenience functions to talk to the printer.

    /// "raw box"
    pub fn rbox(&mut self, indent: usize, b: Breaks) {
        self.scan_begin(BeginToken { offset: indent as isize, breaks: b })
    pub fn rbox(&mut self, indent: usize, breaks: Breaks) {
        self.scan_begin(BeginToken { offset: indent as isize, breaks })
    }

    /// Inconsistent breaking box
@@ -500,8 +449,8 @@ impl Printer {
    }

    pub fn word<S: Into<Cow<'static, str>>>(&mut self, wrd: S) {
        let s = wrd.into();
        self.scan_string(s)
        let string = wrd.into();
        self.scan_string(string)
    }

    fn spaces(&mut self, n: usize) {
@@ -32,11 +32,6 @@ impl<T> RingBuffer<T> {
        index
    }

    pub fn advance_left(&mut self) {
        self.data.pop_front().unwrap();
        self.offset += 1;
    }

    pub fn clear(&mut self) {
        self.data.clear();
    }

@@ -53,6 +48,12 @@ impl<T> RingBuffer<T> {
        self.data.front_mut()
    }

    pub fn pop_first(&mut self) -> Option<T> {
        let first = self.data.pop_front()?;
        self.offset += 1;
        Some(first)
    }

    pub fn last(&self) -> Option<&T> {
        self.data.back()
    }
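The `RingBuffer` hunks above fold `advance_left`/`clear` usage into a fallible `pop_first` while keeping the buffer's defining property: indices returned by `push` are absolute, so they stay valid as the left end advances. A minimal sketch of that idea with a hypothetical `OffsetQueue` type (simplified API, `VecDeque`-backed like the real one):

```rust
use std::collections::VecDeque;

/// A queue whose `push` hands out absolute indices that remain valid
/// even after elements are popped from the front.
struct OffsetQueue<T> {
    data: VecDeque<T>,
    offset: usize, // absolute index of the current front element
}

impl<T> OffsetQueue<T> {
    fn new() -> Self {
        OffsetQueue { data: VecDeque::new(), offset: 0 }
    }

    fn push(&mut self, value: T) -> usize {
        let index = self.offset + self.data.len();
        self.data.push_back(value);
        index
    }

    fn index_of_first(&self) -> usize {
        self.offset
    }

    /// Counterpart of the new `pop_first` above: remove the front element,
    /// if any, and advance the offset so later indices keep their meaning.
    fn pop_first(&mut self) -> Option<T> {
        let first = self.data.pop_front()?;
        self.offset += 1;
        Some(first)
    }

    fn get(&self, index: usize) -> Option<&T> {
        self.data.get(index.checked_sub(self.offset)?)
    }
}

fn main() {
    let mut queue = OffsetQueue::new();
    let a = queue.push("a");
    let b = queue.push("b");
    assert_eq!(queue.index_of_first(), a);
    queue.pop_first();
    // `b`'s index is still valid after the front advanced.
    assert_eq!(queue.get(b), Some(&"b"));
    assert_eq!(queue.index_of_first(), b);
}
```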
@@ -3,7 +3,11 @@ use rustc_middle::ty::{self, Ty, TyCtxt, TypeFoldable, TypeVisitor};
use std::convert::TryInto;
use std::ops::ControlFlow;

/// Returns `true` if a used generic parameter requires substitution.
/// Checks whether a type contains generic parameters which require substitution.
///
/// In case it does, returns a `TooGeneric` const eval error. Note that due to polymorphization
/// types may be "concrete enough" even though they still contain generic parameters in
/// case these parameters are unused.
crate fn ensure_monomorphic_enough<'tcx, T>(tcx: TyCtxt<'tcx>, ty: T) -> InterpResult<'tcx>
where
    T: TypeFoldable<'tcx>,
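The reworded comment above rests on the observation that, with polymorphization, a type can be "concrete enough" even while it still names generic parameters, provided those parameters are unused. A loose, purely illustrative analogy in ordinary Rust (nothing here is compiler-internal): every instantiation of a function whose result ignores its type parameter behaves identically, so evaluating a call does not really require knowing the parameter.

```rust
// `T` appears in the signature, but the computed value never depends on it,
// so `bump::<String>` and `bump::<Vec<u8>>` do exactly the same work.
fn bump<T>(x: u32) -> u32 {
    x + 1
}

fn main() {
    assert_eq!(bump::<String>(41), 42);
    assert_eq!(bump::<Vec<u8>>(41), 42);
}
```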
@@ -449,13 +449,17 @@ impl Definitions {
    }

    #[inline(always)]
    pub fn local_def_path_hash_to_def_id(&self, hash: DefPathHash) -> LocalDefId {
    pub fn local_def_path_hash_to_def_id(
        &self,
        hash: DefPathHash,
        err: &mut dyn FnMut() -> !,
    ) -> LocalDefId {
        debug_assert!(hash.stable_crate_id() == self.stable_crate_id);
        self.table
            .def_path_hash_to_index
            .get(&hash)
            .map(|local_def_index| LocalDefId { local_def_index })
            .unwrap()
            .unwrap_or_else(|| err())
    }

    pub fn def_path_hash_to_def_index_map(&self) -> &DefPathHashMap {
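The new `err: &mut dyn FnMut() -> !` parameter above replaces a bare `unwrap()`, letting each caller supply its own diverging callback and therefore its own panic message when the hash has no entry. A small sketch of the same pattern with hypothetical types:

```rust
use std::collections::HashMap;

/// Looks up `key`, deferring to the caller for what to do when the key is
/// absent; the `-> !` return type means the callback can only diverge.
fn lookup_or_die(map: &HashMap<u64, u32>, key: u64, err: &mut dyn FnMut() -> !) -> u32 {
    map.get(&key).copied().unwrap_or_else(|| err())
}

fn main() {
    let map: HashMap<u64, u32> = HashMap::from([(1, 10)]);
    let value = lookup_or_die(&map, 1, &mut || panic!("no entry for hash {:x}", 1u64));
    assert_eq!(value, 10);
}
```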
@ -29,7 +29,7 @@ rustc_index = { path = "../rustc_index" }
|
|||
rustc_serialize = { path = "../rustc_serialize" }
|
||||
rustc_ast = { path = "../rustc_ast" }
|
||||
rustc_span = { path = "../rustc_span" }
|
||||
chalk-ir = "0.75.0"
|
||||
chalk-ir = "0.76.0"
|
||||
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
|
||||
rustc_session = { path = "../rustc_session" }
|
||||
rustc_type_ir = { path = "../rustc_type_ir" }
|
||||
|
|
|
@@ -266,7 +266,9 @@ impl DepNodeExt for DepNode {
    /// has been removed.
    fn extract_def_id<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Option<DefId> {
        if self.kind.fingerprint_style(tcx) == FingerprintStyle::DefPathHash {
            Some(tcx.def_path_hash_to_def_id(DefPathHash(self.hash.into())))
            Some(tcx.def_path_hash_to_def_id(DefPathHash(self.hash.into()), &mut || {
                panic!("Failed to extract DefId: {:?} {}", self.kind, self.hash)
            }))
        } else {
            None
        }
@@ -308,7 +308,7 @@ pub struct ScopeTree {
    /// The reason is that semantically, until the `box` expression returns,
    /// the values are still owned by their containing expressions. So
    /// we'll see that `&x`.
    pub yield_in_scope: FxHashMap<Scope, YieldData>,
    pub yield_in_scope: FxHashMap<Scope, Vec<YieldData>>,

    /// The number of visit_expr and visit_pat calls done in the body.
    /// Used to sanity check visit_expr/visit_pat call count when
@@ -423,8 +423,8 @@ impl ScopeTree {

    /// Checks whether the given scope contains a `yield`. If so,
    /// returns `Some(YieldData)`. If not, returns `None`.
    pub fn yield_in_scope(&self, scope: Scope) -> Option<YieldData> {
        self.yield_in_scope.get(&scope).cloned()
    pub fn yield_in_scope(&self, scope: Scope) -> Option<&Vec<YieldData>> {
        self.yield_in_scope.get(&scope)
    }

    /// Gives the number of expressions visited in a body.
@@ -1322,7 +1322,7 @@ impl<'tcx> TyCtxt<'tcx> {
    /// Converts a `DefPathHash` to its corresponding `DefId` in the current compilation
    /// session, if it still exists. This is used during incremental compilation to
    /// turn a deserialized `DefPathHash` into its current `DefId`.
    pub fn def_path_hash_to_def_id(self, hash: DefPathHash) -> DefId {
    pub fn def_path_hash_to_def_id(self, hash: DefPathHash, err: &mut dyn FnMut() -> !) -> DefId {
        debug!("def_path_hash_to_def_id({:?})", hash);

        let stable_crate_id = hash.stable_crate_id();
@@ -1330,7 +1330,10 @@ impl<'tcx> TyCtxt<'tcx> {
        // If this is a DefPathHash from the local crate, we can look up the
        // DefId in the tcx's `Definitions`.
        if stable_crate_id == self.sess.local_stable_crate_id() {
            self.untracked_resolutions.definitions.local_def_path_hash_to_def_id(hash).to_def_id()
            self.untracked_resolutions
                .definitions
                .local_def_path_hash_to_def_id(hash, err)
                .to_def_id()
        } else {
            // If this is a DefPathHash from an upstream crate, let the CrateStore map
            // it to a DefId.
@@ -366,7 +366,8 @@ fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx h
            let target_scopes = visitor.fixup_scopes.drain(start_point..);

            for scope in target_scopes {
                let mut yield_data = visitor.scope_tree.yield_in_scope.get_mut(&scope).unwrap();
                let mut yield_data =
                    visitor.scope_tree.yield_in_scope.get_mut(&scope).unwrap().last_mut().unwrap();
                let count = yield_data.expr_and_pat_count;
                let span = yield_data.span;

@@ -429,7 +430,13 @@ fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx h
            };
            let data =
                YieldData { span, expr_and_pat_count: visitor.expr_and_pat_count, source: *source };
            visitor.scope_tree.yield_in_scope.insert(scope, data);
            match visitor.scope_tree.yield_in_scope.get_mut(&scope) {
                Some(yields) => yields.push(data),
                None => {
                    visitor.scope_tree.yield_in_scope.insert(scope, vec![data]);
                }
            }

            if visitor.pessimistic_yield {
                debug!("resolve_expr in pessimistic_yield - marking scope {:?} for fixup", scope);
                visitor.fixup_scopes.push(scope);
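The `match` on `get_mut` above appends to a per-scope `Vec<YieldData>`, inserting a fresh vector the first time a scope yields. The same insert-or-push shape can also be written with the map's entry API; a minimal sketch with stand-in types (the real code uses `FxHashMap` and a richer `YieldData`):

```rust
use std::collections::HashMap;

struct YieldData {
    expr_and_pat_count: usize,
}

fn record_yield(map: &mut HashMap<u32, Vec<YieldData>>, scope: u32, data: YieldData) {
    // Equivalent to: match get_mut { Some(v) => v.push(data), None => insert(vec![data]) }
    map.entry(scope).or_default().push(data);
}

fn main() {
    let mut yields: HashMap<u32, Vec<YieldData>> = HashMap::new();
    record_yield(&mut yields, 7, YieldData { expr_and_pat_count: 3 });
    record_yield(&mut yields, 7, YieldData { expr_and_pat_count: 9 });
    assert_eq!(yields[&7].len(), 2);
    assert_eq!(yields[&7][1].expr_and_pat_count, 9);
}
```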
@@ -761,7 +761,9 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for DefId {
        // If we get to this point, then all of the query inputs were green,
        // which means that the definition with this hash is guaranteed to
        // still exist in the current compilation session.
        Ok(d.tcx().def_path_hash_to_def_id(def_path_hash))
        Ok(d.tcx().def_path_hash_to_def_id(def_path_hash, &mut || {
            panic!("Failed to convert DefPathHash {:?}", def_path_hash)
        }))
    }
}
@@ -2517,6 +2517,10 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
                self.visit_expr(elem);
                self.resolve_anon_const(ct, IsRepeatExpr::Yes);
            }
            ExprKind::Index(ref elem, ref idx) => {
                self.resolve_expr(elem, Some(expr));
                self.visit_expr(idx);
            }
            _ => {
                visit::walk_expr(self, expr);
            }
@@ -970,7 +970,13 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
        };

        match (res, source) {
            (Res::Def(DefKind::Macro(MacroKind::Bang), _), _) => {
            (
                Res::Def(DefKind::Macro(MacroKind::Bang), _),
                PathSource::Expr(Some(Expr {
                    kind: ExprKind::Index(..) | ExprKind::Call(..), ..
                }))
                | PathSource::Struct,
            ) => {
                err.span_label(span, fallback_label);
                err.span_suggestion_verbose(
                    span.shrink_to_hi(),
@@ -982,6 +988,9 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
                    err.note("if you want the `try` keyword, you need Rust 2018 or later");
                }
            }
            (Res::Def(DefKind::Macro(MacroKind::Bang), _), _) => {
                err.span_label(span, fallback_label);
            }
            (Res::Def(DefKind::TyAlias, def_id), PathSource::Trait(_)) => {
                err.span_label(span, "type aliases cannot be used as traits");
                if self.r.session.is_nightly_build() {
@ -12,9 +12,9 @@ rustc_hir = { path = "../rustc_hir" }
|
|||
rustc_index = { path = "../rustc_index" }
|
||||
rustc_ast = { path = "../rustc_ast" }
|
||||
rustc_span = { path = "../rustc_span" }
|
||||
chalk-ir = "0.75.0"
|
||||
chalk-engine = "0.75.0"
|
||||
chalk-solve = "0.75.0"
|
||||
chalk-ir = "0.76.0"
|
||||
chalk-engine = "0.76.0"
|
||||
chalk-solve = "0.76.0"
|
||||
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
|
||||
rustc_infer = { path = "../rustc_infer" }
|
||||
rustc_trait_selection = { path = "../rustc_trait_selection" }
|
||||
|
|
|
@ -15,6 +15,7 @@ rustc_middle = { path = "../rustc_middle" }
|
|||
rustc_attr = { path = "../rustc_attr" }
|
||||
rustc_data_structures = { path = "../rustc_data_structures" }
|
||||
rustc_errors = { path = "../rustc_errors" }
|
||||
rustc_graphviz = { path = "../rustc_graphviz" }
|
||||
rustc_hir = { path = "../rustc_hir" }
|
||||
rustc_hir_pretty = { path = "../rustc_hir_pretty" }
|
||||
rustc_target = { path = "../rustc_target" }
|
||||
|
@ -27,3 +28,4 @@ rustc_infer = { path = "../rustc_infer" }
|
|||
rustc_trait_selection = { path = "../rustc_trait_selection" }
|
||||
rustc_ty_utils = { path = "../rustc_ty_utils" }
|
||||
rustc_lint = { path = "../rustc_lint" }
|
||||
rustc_serialize = { path = "../rustc_serialize" }
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
//! is calculated in `rustc_const_eval::transform::generator` and may be a subset of the
|
||||
//! types computed here.
|
||||
|
||||
use self::drop_ranges::DropRanges;
|
||||
use super::FnCtxt;
|
||||
use rustc_data_structures::fx::{FxHashSet, FxIndexSet};
|
||||
use rustc_errors::pluralize;
|
||||
|
@ -19,6 +20,8 @@ use rustc_span::Span;
|
|||
use smallvec::SmallVec;
|
||||
use tracing::debug;
|
||||
|
||||
mod drop_ranges;
|
||||
|
||||
struct InteriorVisitor<'a, 'tcx> {
|
||||
fcx: &'a FnCtxt<'a, 'tcx>,
|
||||
types: FxIndexSet<ty::GeneratorInteriorTypeCause<'tcx>>,
|
||||
|
@ -34,6 +37,7 @@ struct InteriorVisitor<'a, 'tcx> {
|
|||
guard_bindings: SmallVec<[SmallVec<[HirId; 4]>; 1]>,
|
||||
guard_bindings_set: HirIdSet,
|
||||
linted_values: HirIdSet,
|
||||
drop_ranges: DropRanges,
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> InteriorVisitor<'a, 'tcx> {
|
||||
|
@ -48,9 +52,11 @@ impl<'a, 'tcx> InteriorVisitor<'a, 'tcx> {
|
|||
) {
|
||||
use rustc_span::DUMMY_SP;
|
||||
|
||||
let ty = self.fcx.resolve_vars_if_possible(ty);
|
||||
|
||||
debug!(
|
||||
"generator_interior: attempting to record type {:?} {:?} {:?} {:?}",
|
||||
ty, scope, expr, source_span
|
||||
"attempting to record type ty={:?}; hir_id={:?}; scope={:?}; expr={:?}; source_span={:?}; expr_count={:?}",
|
||||
ty, hir_id, scope, expr, source_span, self.expr_count,
|
||||
);
|
||||
|
||||
let live_across_yield = scope
|
||||
|
@ -63,21 +69,27 @@ impl<'a, 'tcx> InteriorVisitor<'a, 'tcx> {
|
|||
//
|
||||
// See the mega-comment at `yield_in_scope` for a proof.
|
||||
|
||||
debug!(
|
||||
"comparing counts yield: {} self: {}, source_span = {:?}",
|
||||
yield_data.expr_and_pat_count, self.expr_count, source_span
|
||||
);
|
||||
yield_data
|
||||
.iter()
|
||||
.find(|yield_data| {
|
||||
debug!(
|
||||
"comparing counts yield: {} self: {}, source_span = {:?}",
|
||||
yield_data.expr_and_pat_count, self.expr_count, source_span
|
||||
);
|
||||
|
||||
// If it is a borrowing happening in the guard,
|
||||
// it needs to be recorded regardless because they
|
||||
// do live across this yield point.
|
||||
if guard_borrowing_from_pattern
|
||||
|| yield_data.expr_and_pat_count >= self.expr_count
|
||||
{
|
||||
Some(yield_data)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
if self.drop_ranges.is_dropped_at(hir_id, yield_data.expr_and_pat_count)
|
||||
{
|
||||
debug!("value is dropped at yield point; not recording");
|
||||
return false;
|
||||
}
|
||||
|
||||
// If it is a borrowing happening in the guard,
|
||||
// it needs to be recorded regardless because they
|
||||
// do live across this yield point.
|
||||
guard_borrowing_from_pattern
|
||||
|| yield_data.expr_and_pat_count >= self.expr_count
|
||||
})
|
||||
.cloned()
|
||||
})
|
||||
})
|
||||
.unwrap_or_else(|| {
|
||||
|
@ -85,7 +97,6 @@ impl<'a, 'tcx> InteriorVisitor<'a, 'tcx> {
|
|||
});
|
||||
|
||||
if let Some(yield_data) = live_across_yield {
|
||||
let ty = self.fcx.resolve_vars_if_possible(ty);
|
||||
debug!(
|
||||
"type in expr = {:?}, scope = {:?}, type = {:?}, count = {}, yield_span = {:?}",
|
||||
expr, scope, ty, self.expr_count, yield_data.span
|
||||
|
@ -154,7 +165,6 @@ impl<'a, 'tcx> InteriorVisitor<'a, 'tcx> {
|
|||
self.expr_count,
|
||||
expr.map(|e| e.span)
|
||||
);
|
||||
let ty = self.fcx.resolve_vars_if_possible(ty);
|
||||
if let Some((unresolved_type, unresolved_type_span)) =
|
||||
self.fcx.unresolved_type_vars(&ty)
|
||||
{
|
||||
|
@ -186,6 +196,7 @@ pub fn resolve_interior<'a, 'tcx>(
|
|||
guard_bindings: <_>::default(),
|
||||
guard_bindings_set: <_>::default(),
|
||||
linted_values: <_>::default(),
|
||||
drop_ranges: drop_ranges::compute_drop_ranges(fcx, def_id, body),
|
||||
};
|
||||
intravisit::walk_body(&mut visitor, body);
|
||||
|
||||
|
@ -313,6 +324,7 @@ impl<'a, 'tcx> Visitor<'tcx> for InteriorVisitor<'a, 'tcx> {
|
|||
|
||||
fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
|
||||
let mut guard_borrowing_from_pattern = false;
|
||||
|
||||
match &expr.kind {
|
||||
ExprKind::Call(callee, args) => match &callee.kind {
|
||||
ExprKind::Path(qpath) => {
|
||||
|
|
|
@@ -0,0 +1,269 @@
//! Drop range analysis finds the portions of the tree where a value is guaranteed to be dropped
//! (i.e. moved, uninitialized, etc.). This is used to exclude the types of those values from the
//! generator type. See `InteriorVisitor::record` for where the results of this analysis are used.
//!
//! There are three phases to this analysis:
//! 1. Use `ExprUseVisitor` to identify the interesting values that are consumed and borrowed.
//! 2. Use `DropRangeVisitor` to find where the interesting values are dropped or reinitialized,
//!    and also build a control flow graph.
//! 3. Use `DropRanges::propagate_to_fixpoint` to flow the dropped/reinitialized information through
//!    the CFG and find the exact points where we know a value is definitely dropped.
//!
//! The end result is a data structure that maps the post-order index of each node in the HIR tree
//! to a set of values that are known to be dropped at that location.

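As a rough illustration of the kind of user code this analysis is aimed at (a hypothetical example, not part of this module): once a value is known to be dropped before every yield point, its type no longer has to be recorded in the generator's interior.

```rust
use std::rc::Rc;

async fn stall() {}

// The `Rc` is dropped before the only `.await`, so a drop-aware analysis can
// conclude it never lives across a yield point and need not be stored in the
// generated future's state.
async fn example() {
    let token = Rc::new(0u8);
    drop(token);
    stall().await;
}

fn main() {
    // Just build the future; driving it would need an executor.
    let _future = example();
}
```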
use self::cfg_build::build_control_flow_graph;
|
||||
use self::record_consumed_borrow::find_consumed_and_borrowed;
|
||||
use crate::check::FnCtxt;
|
||||
use hir::def_id::DefId;
|
||||
use hir::{Body, HirId, HirIdMap, Node};
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_hir as hir;
|
||||
use rustc_index::bit_set::BitSet;
|
||||
use rustc_index::vec::IndexVec;
|
||||
use rustc_middle::hir::map::Map;
|
||||
use rustc_middle::hir::place::{PlaceBase, PlaceWithHirId};
|
||||
use rustc_middle::ty;
|
||||
use std::collections::BTreeMap;
|
||||
use std::fmt::Debug;
|
||||
|
||||
mod cfg_build;
|
||||
mod cfg_propagate;
|
||||
mod cfg_visualize;
|
||||
mod record_consumed_borrow;
|
||||
|
||||
pub fn compute_drop_ranges<'a, 'tcx>(
|
||||
fcx: &'a FnCtxt<'a, 'tcx>,
|
||||
def_id: DefId,
|
||||
body: &'tcx Body<'tcx>,
|
||||
) -> DropRanges {
|
||||
let consumed_borrowed_places = find_consumed_and_borrowed(fcx, def_id, body);
|
||||
|
||||
let num_exprs = fcx.tcx.region_scope_tree(def_id).body_expr_count(body.id()).unwrap_or(0);
|
||||
let mut drop_ranges = build_control_flow_graph(
|
||||
fcx.tcx.hir(),
|
||||
fcx.tcx,
|
||||
&fcx.typeck_results.borrow(),
|
||||
consumed_borrowed_places,
|
||||
body,
|
||||
num_exprs,
|
||||
);
|
||||
|
||||
drop_ranges.propagate_to_fixpoint();
|
||||
|
||||
DropRanges { tracked_value_map: drop_ranges.tracked_value_map, nodes: drop_ranges.nodes }
|
||||
}
|
||||
|
||||
/// Applies `f` to consumable node in the HIR subtree pointed to by `place`.
|
||||
///
|
||||
/// This includes the place itself, and if the place is a reference to a local
|
||||
/// variable then `f` is also called on the HIR node for that variable as well.
|
||||
///
|
||||
/// For example, if `place` points to `foo()`, then `f` is called once for the
|
||||
/// result of `foo`. On the other hand, if `place` points to `x` then `f` will
|
||||
/// be called both on the `ExprKind::Path` node that represents the expression
|
||||
/// as well as the HirId of the local `x` itself.
|
||||
fn for_each_consumable<'tcx>(hir: Map<'tcx>, place: TrackedValue, mut f: impl FnMut(TrackedValue)) {
|
||||
f(place);
|
||||
let node = hir.find(place.hir_id());
|
||||
if let Some(Node::Expr(expr)) = node {
|
||||
match expr.kind {
|
||||
hir::ExprKind::Path(hir::QPath::Resolved(
|
||||
_,
|
||||
hir::Path { res: hir::def::Res::Local(hir_id), .. },
|
||||
)) => {
|
||||
f(TrackedValue::Variable(*hir_id));
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
rustc_index::newtype_index! {
|
||||
pub struct PostOrderId {
|
||||
DEBUG_FORMAT = "id({})",
|
||||
}
|
||||
}
|
||||
|
||||
rustc_index::newtype_index! {
|
||||
pub struct TrackedValueIndex {
|
||||
DEBUG_FORMAT = "hidx({})",
|
||||
}
|
||||
}
|
||||
|
||||
/// Identifies a value whose drop state we need to track.
|
||||
#[derive(PartialEq, Eq, Hash, Debug, Clone, Copy)]
|
||||
enum TrackedValue {
|
||||
/// Represents a named variable, such as a let binding, parameter, or upvar.
|
||||
///
|
||||
/// The HirId points to the variable's definition site.
|
||||
Variable(HirId),
|
||||
/// A value produced as a result of an expression.
|
||||
///
|
||||
/// The HirId points to the expression that returns this value.
|
||||
Temporary(HirId),
|
||||
}
|
||||
|
||||
impl TrackedValue {
|
||||
fn hir_id(&self) -> HirId {
|
||||
match self {
|
||||
TrackedValue::Variable(hir_id) | TrackedValue::Temporary(hir_id) => *hir_id,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents a reason why we might not be able to convert a HirId or Place
|
||||
/// into a tracked value.
|
||||
#[derive(Debug)]
|
||||
enum TrackedValueConversionError {
|
||||
/// Place projections are not currently supported.
///
/// The reasoning around these is kind of subtle, so we choose to be more
/// conservative around these for now. There is no reason in theory we
/// cannot support them, we just have not implemented it yet.
|
||||
PlaceProjectionsNotSupported,
|
||||
}
|
||||
|
||||
impl TryFrom<&PlaceWithHirId<'_>> for TrackedValue {
|
||||
type Error = TrackedValueConversionError;
|
||||
|
||||
fn try_from(place_with_id: &PlaceWithHirId<'_>) -> Result<Self, Self::Error> {
|
||||
if !place_with_id.place.projections.is_empty() {
|
||||
debug!(
|
||||
"TrackedValue from PlaceWithHirId: {:?} has projections, which are not supported.",
|
||||
place_with_id
|
||||
);
|
||||
return Err(TrackedValueConversionError::PlaceProjectionsNotSupported);
|
||||
}
|
||||
|
||||
match place_with_id.place.base {
|
||||
PlaceBase::Rvalue | PlaceBase::StaticItem => {
|
||||
Ok(TrackedValue::Temporary(place_with_id.hir_id))
|
||||
}
|
||||
PlaceBase::Local(hir_id)
|
||||
| PlaceBase::Upvar(ty::UpvarId { var_path: ty::UpvarPath { hir_id }, .. }) => {
|
||||
Ok(TrackedValue::Variable(hir_id))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DropRanges {
|
||||
tracked_value_map: FxHashMap<TrackedValue, TrackedValueIndex>,
|
||||
nodes: IndexVec<PostOrderId, NodeInfo>,
|
||||
}
|
||||
|
||||
impl DropRanges {
|
||||
pub fn is_dropped_at(&self, hir_id: HirId, location: usize) -> bool {
|
||||
self.tracked_value_map
|
||||
.get(&TrackedValue::Temporary(hir_id))
|
||||
.or(self.tracked_value_map.get(&TrackedValue::Variable(hir_id)))
|
||||
.cloned()
|
||||
.map_or(false, |tracked_value_id| {
|
||||
self.expect_node(location.into()).drop_state.contains(tracked_value_id)
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns a reference to the NodeInfo for a node, panicking if it does not exist
|
||||
fn expect_node(&self, id: PostOrderId) -> &NodeInfo {
|
||||
&self.nodes[id]
|
||||
}
|
||||
}
|
||||
|
||||
/// Tracks information needed to compute drop ranges.
|
||||
struct DropRangesBuilder {
|
||||
/// The core of DropRangesBuilder is a set of nodes, which each represent
|
||||
/// one expression. We primarily refer to them by their index in a
|
||||
/// post-order traversal of the HIR tree, since this is what
|
||||
/// generator_interior uses to talk about yield positions.
|
||||
///
|
||||
/// This IndexVec keeps the relevant details for each node. See the
|
||||
/// NodeInfo struct for more details, but this information includes things
|
||||
/// such as the set of control-flow successors, which variables are dropped
|
||||
/// or reinitialized, and whether each variable has been inferred to be
/// known-dropped or potentially reinitialized at each point.
|
||||
nodes: IndexVec<PostOrderId, NodeInfo>,
|
||||
/// We refer to values whose drop state we are tracking by the HirId of
|
||||
/// where they are defined. Within a NodeInfo, however, we store the
|
||||
/// drop-state in a bit vector indexed by a HirIdIndex
|
||||
/// (see NodeInfo::drop_state). The hir_id_map field stores the mapping
|
||||
/// from HirIds to the HirIdIndex that is used to represent that value in
|
||||
/// bitvector.
|
||||
tracked_value_map: FxHashMap<TrackedValue, TrackedValueIndex>,
|
||||
|
||||
/// When building the control flow graph, we don't always know the
|
||||
/// post-order index of the target node at the point we encounter it.
|
||||
/// For example, this happens with break and continue. In those cases,
|
||||
/// we store a pair of the PostOrderId of the source and the HirId
|
||||
/// of the target. Once we have gathered all of these edges, we make a
|
||||
/// pass over the set of deferred edges (see process_deferred_edges in
|
||||
/// cfg_build.rs), look up the PostOrderId for the target (since now the
|
||||
/// post-order index for all nodes is known), and add missing control flow
|
||||
/// edges.
|
||||
deferred_edges: Vec<(PostOrderId, HirId)>,
|
||||
/// This maps HirIds of expressions to their post-order index. It is
|
||||
/// used in process_deferred_edges to correctly add back-edges.
|
||||
post_order_map: HirIdMap<PostOrderId>,
|
||||
}
|
||||
|
||||
impl Debug for DropRangesBuilder {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("DropRanges")
|
||||
.field("hir_id_map", &self.tracked_value_map)
|
||||
.field("post_order_maps", &self.post_order_map)
|
||||
.field("nodes", &self.nodes.iter_enumerated().collect::<BTreeMap<_, _>>())
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
/// DropRanges keeps track of what values are definitely dropped at each point in the code.
|
||||
///
|
||||
/// Values of interest are defined by the hir_id of their place. Locations in code are identified
|
||||
/// by their index in the post-order traversal. At its core, DropRanges maps
|
||||
/// (hir_id, post_order_id) -> bool, where a true value indicates that the value is definitely
|
||||
/// dropped at the point of the node identified by post_order_id.
|
||||
impl DropRangesBuilder {
|
||||
/// Returns the number of values (hir_ids) that are tracked
|
||||
fn num_values(&self) -> usize {
|
||||
self.tracked_value_map.len()
|
||||
}
|
||||
|
||||
fn node_mut(&mut self, id: PostOrderId) -> &mut NodeInfo {
|
||||
let size = self.num_values();
|
||||
self.nodes.ensure_contains_elem(id, || NodeInfo::new(size));
|
||||
&mut self.nodes[id]
|
||||
}
|
||||
|
||||
fn add_control_edge(&mut self, from: PostOrderId, to: PostOrderId) {
|
||||
trace!("adding control edge from {:?} to {:?}", from, to);
|
||||
self.node_mut(from.into()).successors.push(to.into());
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct NodeInfo {
|
||||
/// IDs of nodes that can follow this one in the control flow
|
||||
///
|
||||
/// If the vec is empty, then control proceeds to the next node.
|
||||
successors: Vec<PostOrderId>,
|
||||
|
||||
/// List of hir_ids that are dropped by this node.
|
||||
drops: Vec<TrackedValueIndex>,
|
||||
|
||||
/// List of hir_ids that are reinitialized by this node.
|
||||
reinits: Vec<TrackedValueIndex>,
|
||||
|
||||
/// Set of values that are definitely dropped at this point.
|
||||
drop_state: BitSet<TrackedValueIndex>,
|
||||
}
|
||||
|
||||
impl NodeInfo {
|
||||
fn new(num_values: usize) -> Self {
|
||||
Self {
|
||||
successors: vec![],
|
||||
drops: vec![],
|
||||
reinits: vec![],
|
||||
drop_state: BitSet::new_filled(num_values),
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,473 @@
|
|||
use super::{
|
||||
for_each_consumable, record_consumed_borrow::ConsumedAndBorrowedPlaces, DropRangesBuilder,
|
||||
NodeInfo, PostOrderId, TrackedValue, TrackedValueIndex,
|
||||
};
|
||||
use hir::{
|
||||
intravisit::{self, Visitor},
|
||||
Body, Expr, ExprKind, Guard, HirId,
|
||||
};
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_hir as hir;
|
||||
use rustc_index::vec::IndexVec;
|
||||
use rustc_middle::{
|
||||
hir::map::Map,
|
||||
ty::{TyCtxt, TypeckResults},
|
||||
};
|
||||
use std::mem::swap;
|
||||
|
||||
/// Traverses the body to find the control flow graph and the locations where
/// the relevant places are dropped or reinitialized.
|
||||
///
|
||||
/// The resulting structure still needs to be iterated to a fixed point, which
|
||||
/// can be done with propagate_to_fixpoint in cfg_propagate.
|
||||
pub(super) fn build_control_flow_graph<'tcx>(
|
||||
hir: Map<'tcx>,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
typeck_results: &TypeckResults<'tcx>,
|
||||
consumed_borrowed_places: ConsumedAndBorrowedPlaces,
|
||||
body: &'tcx Body<'tcx>,
|
||||
num_exprs: usize,
|
||||
) -> DropRangesBuilder {
|
||||
let mut drop_range_visitor =
|
||||
DropRangeVisitor::new(hir, tcx, typeck_results, consumed_borrowed_places, num_exprs);
|
||||
intravisit::walk_body(&mut drop_range_visitor, body);
|
||||
|
||||
drop_range_visitor.drop_ranges.process_deferred_edges();
|
||||
|
||||
drop_range_visitor.drop_ranges
|
||||
}
|
||||
|
||||
/// This struct is used to gather the information for `DropRanges` to determine the regions of the
|
||||
/// HIR tree for which a value is dropped.
|
||||
///
|
||||
/// We are interested in points where a variable is dropped or initialized, and the control flow
|
||||
/// of the code. We identify locations in code by their post-order traversal index, so it is
|
||||
/// important for this traversal to match that in `RegionResolutionVisitor` and `InteriorVisitor`.
|
||||
///
|
||||
/// We make several simplifying assumptions, with the goal of being more conservative than
|
||||
/// necessary rather than less conservative (since being less conservative is unsound, but more
|
||||
/// conservative is still safe). These assumptions are:
|
||||
///
|
||||
/// 1. Moving a variable `a` counts as a move of the whole variable.
|
||||
/// 2. Moving a partial path like `a.b.c` is ignored.
|
||||
/// 3. Reinitializing through a field (e.g. `a.b.c = 5`) counts as a reinitialization of all of
|
||||
/// `a`.
|
||||
///
|
||||
/// Some examples:
|
||||
///
|
||||
/// Rule 1:
|
||||
/// ```rust
|
||||
/// let mut a = (vec![0], vec![0]);
|
||||
/// drop(a);
|
||||
/// // `a` is not considered initialized.
|
||||
/// ```
|
||||
///
|
||||
/// Rule 2:
|
||||
/// ```rust
|
||||
/// let mut a = (vec![0], vec![0]);
|
||||
/// drop(a.0);
|
||||
/// drop(a.1);
|
||||
/// // `a` is still considered initialized.
|
||||
/// ```
|
||||
///
|
||||
/// Rule 3:
|
||||
/// ```rust
|
||||
/// let mut a = (vec![0], vec![0]);
|
||||
/// drop(a);
|
||||
/// a.1 = vec![1];
|
||||
/// // all of `a` is considered initialized
|
||||
/// ```
|
||||
|
||||
struct DropRangeVisitor<'a, 'tcx> {
|
||||
hir: Map<'tcx>,
|
||||
places: ConsumedAndBorrowedPlaces,
|
||||
drop_ranges: DropRangesBuilder,
|
||||
expr_index: PostOrderId,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
typeck_results: &'a TypeckResults<'tcx>,
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> DropRangeVisitor<'a, 'tcx> {
|
||||
fn new(
|
||||
hir: Map<'tcx>,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
typeck_results: &'a TypeckResults<'tcx>,
|
||||
places: ConsumedAndBorrowedPlaces,
|
||||
num_exprs: usize,
|
||||
) -> Self {
|
||||
debug!("consumed_places: {:?}", places.consumed);
|
||||
let drop_ranges = DropRangesBuilder::new(
|
||||
places.consumed.iter().flat_map(|(_, places)| places.iter().cloned()),
|
||||
hir,
|
||||
num_exprs,
|
||||
);
|
||||
Self { hir, places, drop_ranges, expr_index: PostOrderId::from_u32(0), typeck_results, tcx }
|
||||
}
|
||||
|
||||
fn record_drop(&mut self, value: TrackedValue) {
|
||||
if self.places.borrowed.contains(&value) {
|
||||
debug!("not marking {:?} as dropped because it is borrowed at some point", value);
|
||||
} else {
|
||||
debug!("marking {:?} as dropped at {:?}", value, self.expr_index);
|
||||
let count = self.expr_index;
|
||||
self.drop_ranges.drop_at(value, count);
|
||||
}
|
||||
}
|
||||
|
||||
/// ExprUseVisitor's consume callback doesn't go deep enough for our purposes in all
|
||||
/// expressions. This method consumes a little deeper into the expression when needed.
|
||||
fn consume_expr(&mut self, expr: &hir::Expr<'_>) {
|
||||
debug!("consuming expr {:?}, count={:?}", expr.hir_id, self.expr_index);
|
||||
let places = self
|
||||
.places
|
||||
.consumed
|
||||
.get(&expr.hir_id)
|
||||
.map_or(vec![], |places| places.iter().cloned().collect());
|
||||
for place in places {
|
||||
for_each_consumable(self.hir, place, |value| self.record_drop(value));
|
||||
}
|
||||
}
|
||||
|
||||
/// Marks an expression as being reinitialized.
|
||||
///
|
||||
/// Note that we always approximate on the side of things being more
|
||||
/// initialized than they actually are, as opposed to less. In cases such
|
||||
/// as `x.y = ...`, we would consider all of `x` as being initialized
|
||||
/// instead of just the `y` field.
|
||||
///
|
||||
/// This is because it is always safe to consider something initialized
|
||||
/// even when it is not, but the other way around will cause problems.
|
||||
///
|
||||
/// In the future, we will hopefully tighten up these rules to be more
|
||||
/// precise.
|
||||
fn reinit_expr(&mut self, expr: &hir::Expr<'_>) {
|
||||
// Walk the expression to find the base. For example, in an expression
|
||||
// like `*a[i].x`, we want to find the `a` and mark that as
|
||||
// reinitialized.
|
||||
match expr.kind {
|
||||
ExprKind::Path(hir::QPath::Resolved(
|
||||
_,
|
||||
hir::Path { res: hir::def::Res::Local(hir_id), .. },
|
||||
)) => {
|
||||
// This is the base case, where we have found an actual named variable.
|
||||
|
||||
let location = self.expr_index;
|
||||
debug!("reinitializing {:?} at {:?}", hir_id, location);
|
||||
self.drop_ranges.reinit_at(TrackedValue::Variable(*hir_id), location);
|
||||
}
|
||||
|
||||
ExprKind::Field(base, _) => self.reinit_expr(base),
|
||||
|
||||
// Most expressions do not refer to something where we need to track
|
||||
// reinitializations.
|
||||
//
|
||||
// Some of these may be interesting in the future
|
||||
ExprKind::Path(..)
|
||||
| ExprKind::Box(..)
|
||||
| ExprKind::ConstBlock(..)
|
||||
| ExprKind::Array(..)
|
||||
| ExprKind::Call(..)
|
||||
| ExprKind::MethodCall(..)
|
||||
| ExprKind::Tup(..)
|
||||
| ExprKind::Binary(..)
|
||||
| ExprKind::Unary(..)
|
||||
| ExprKind::Lit(..)
|
||||
| ExprKind::Cast(..)
|
||||
| ExprKind::Type(..)
|
||||
| ExprKind::DropTemps(..)
|
||||
| ExprKind::Let(..)
|
||||
| ExprKind::If(..)
|
||||
| ExprKind::Loop(..)
|
||||
| ExprKind::Match(..)
|
||||
| ExprKind::Closure(..)
|
||||
| ExprKind::Block(..)
|
||||
| ExprKind::Assign(..)
|
||||
| ExprKind::AssignOp(..)
|
||||
| ExprKind::Index(..)
|
||||
| ExprKind::AddrOf(..)
|
||||
| ExprKind::Break(..)
|
||||
| ExprKind::Continue(..)
|
||||
| ExprKind::Ret(..)
|
||||
| ExprKind::InlineAsm(..)
|
||||
| ExprKind::Struct(..)
|
||||
| ExprKind::Repeat(..)
|
||||
| ExprKind::Yield(..)
|
||||
| ExprKind::Err => (),
|
||||
}
|
||||
}
|
||||
|
||||
/// For an expression with an uninhabited return type (e.g. a function that returns !),
|
||||
/// this adds a self edge to the CFG to model the fact that the function does not
|
||||
/// return.
|
||||
fn handle_uninhabited_return(&mut self, expr: &Expr<'tcx>) {
|
||||
let ty = self.typeck_results.expr_ty(expr);
|
||||
let ty = self.tcx.erase_regions(ty);
|
||||
let m = self.tcx.parent_module(expr.hir_id).to_def_id();
|
||||
let param_env = self.tcx.param_env(m.expect_local());
|
||||
if self.tcx.is_ty_uninhabited_from(m, ty, param_env) {
|
||||
// This function will not return. We model this fact as an infinite loop.
|
||||
self.drop_ranges.add_control_edge(self.expr_index + 1, self.expr_index + 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> Visitor<'tcx> for DropRangeVisitor<'a, 'tcx> {
|
||||
fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
|
||||
let mut reinit = None;
|
||||
match expr.kind {
|
||||
ExprKind::Assign(lhs, rhs, _) => {
|
||||
self.visit_expr(lhs);
|
||||
self.visit_expr(rhs);
|
||||
|
||||
reinit = Some(lhs);
|
||||
}
|
||||
|
||||
ExprKind::If(test, if_true, if_false) => {
|
||||
self.visit_expr(test);
|
||||
|
||||
let fork = self.expr_index;
|
||||
|
||||
self.drop_ranges.add_control_edge(fork, self.expr_index + 1);
|
||||
self.visit_expr(if_true);
|
||||
let true_end = self.expr_index;
|
||||
|
||||
self.drop_ranges.add_control_edge(fork, self.expr_index + 1);
|
||||
if let Some(if_false) = if_false {
|
||||
self.visit_expr(if_false);
|
||||
}
|
||||
|
||||
self.drop_ranges.add_control_edge(true_end, self.expr_index + 1);
|
||||
}
|
||||
ExprKind::Match(scrutinee, arms, ..) => {
|
||||
// We walk through the match expression almost like a chain of if expressions.
|
||||
// Here's a diagram to follow along with:
|
||||
//
|
||||
// ┌─┐
|
||||
// match │A│ {
|
||||
// ┌───┴─┘
|
||||
// │
|
||||
// ┌▼┌───►┌─┐ ┌─┐
|
||||
// │B│ if │C│ =>│D│,
|
||||
// └─┘ ├─┴──►└─┴──────┐
|
||||
// ┌──┘ │
|
||||
// ┌──┘ │
|
||||
// │ │
|
||||
// ┌▼┌───►┌─┐ ┌─┐ │
|
||||
// │E│ if │F│ =>│G│, │
|
||||
// └─┘ ├─┴──►└─┴┐ │
|
||||
// │ │ │
|
||||
// } ▼ ▼ │
|
||||
// ┌─┐◄───────────────────┘
|
||||
// │H│
|
||||
// └─┘
|
||||
//
|
||||
// The order we want is that the scrutinee (A) flows into the first pattern (B),
|
||||
// which flows into the guard (C). Then the guard either flows into the arm body
|
||||
// (D) or into the start of the next arm (E). Finally, the body flows to the end
|
||||
// of the match block (H).
|
||||
//
|
||||
// The subsequent arms follow the same ordering. First we go to the pattern, then
|
||||
// the guard (if present, otherwise it flows straight into the body), then into
|
||||
// the body and then to the end of the match expression.
|
||||
//
|
||||
// The comments below show which edge is being added.
|
||||
self.visit_expr(scrutinee);
|
||||
|
||||
let (guard_exit, arm_end_ids) = arms.iter().fold(
|
||||
(self.expr_index, vec![]),
|
||||
|(incoming_edge, mut arm_end_ids), hir::Arm { pat, body, guard, .. }| {
|
||||
// A -> B, or C -> E
|
||||
self.drop_ranges.add_control_edge(incoming_edge, self.expr_index + 1);
|
||||
self.visit_pat(pat);
|
||||
// B -> C and E -> F are added implicitly due to the traversal order.
|
||||
match guard {
|
||||
Some(Guard::If(expr)) => self.visit_expr(expr),
|
||||
Some(Guard::IfLet(pat, expr)) => {
|
||||
self.visit_pat(pat);
|
||||
self.visit_expr(expr);
|
||||
}
|
||||
None => (),
|
||||
}
|
||||
// Likewise, C -> D and F -> G are added implicitly.
|
||||
|
||||
// Save C, F, so we can add the other outgoing edge.
|
||||
let to_next_arm = self.expr_index;
|
||||
|
||||
// The default edge does not get added since we also have an explicit edge,
|
||||
// so we also need to add an edge to the next node as well.
|
||||
//
|
||||
// This adds C -> D, F -> G
|
||||
self.drop_ranges.add_control_edge(self.expr_index, self.expr_index + 1);
|
||||
self.visit_expr(body);
|
||||
|
||||
// Save the end of the body so we can add the exit edge once we know where
|
||||
// the exit is.
|
||||
arm_end_ids.push(self.expr_index);
|
||||
|
||||
// Pass C to the next iteration, as well as vec![D]
|
||||
//
|
||||
// On the last round through, we pass F and vec![D, G] so that we can
|
||||
// add all the exit edges.
|
||||
(to_next_arm, arm_end_ids)
|
||||
},
|
||||
);
|
||||
// F -> H
|
||||
self.drop_ranges.add_control_edge(guard_exit, self.expr_index + 1);
|
||||
|
||||
arm_end_ids.into_iter().for_each(|arm_end| {
|
||||
// D -> H, G -> H
|
||||
self.drop_ranges.add_control_edge(arm_end, self.expr_index + 1)
|
||||
});
|
||||
}
|
||||
|
||||
ExprKind::Loop(body, ..) => {
|
||||
let loop_begin = self.expr_index + 1;
|
||||
if body.stmts.is_empty() && body.expr.is_none() {
|
||||
// For empty loops we won't have updated self.expr_index after visiting the
|
||||
// body, meaning we'd get an edge from expr_index to expr_index + 1, but
|
||||
// instead we want an edge from expr_index + 1 to expr_index + 1.
|
||||
self.drop_ranges.add_control_edge(loop_begin, loop_begin);
|
||||
} else {
|
||||
self.visit_block(body);
|
||||
self.drop_ranges.add_control_edge(self.expr_index, loop_begin);
|
||||
}
|
||||
}
|
||||
ExprKind::Break(hir::Destination { target_id: Ok(target), .. }, ..)
|
||||
| ExprKind::Continue(hir::Destination { target_id: Ok(target), .. }, ..) => {
|
||||
self.drop_ranges.add_control_edge_hir_id(self.expr_index, target);
|
||||
}
|
||||
|
||||
ExprKind::Call(f, args) => {
|
||||
self.visit_expr(f);
|
||||
for arg in args {
|
||||
self.visit_expr(arg);
|
||||
}
|
||||
|
||||
self.handle_uninhabited_return(expr);
|
||||
}
|
||||
ExprKind::MethodCall(_, _, exprs, _) => {
|
||||
for expr in exprs {
|
||||
self.visit_expr(expr);
|
||||
}
|
||||
|
||||
self.handle_uninhabited_return(expr);
|
||||
}
|
||||
|
||||
ExprKind::AddrOf(..)
|
||||
| ExprKind::Array(..)
|
||||
| ExprKind::AssignOp(..)
|
||||
| ExprKind::Binary(..)
|
||||
| ExprKind::Block(..)
|
||||
| ExprKind::Box(..)
|
||||
| ExprKind::Break(..)
|
||||
| ExprKind::Cast(..)
|
||||
| ExprKind::Closure(..)
|
||||
| ExprKind::ConstBlock(..)
|
||||
| ExprKind::Continue(..)
|
||||
| ExprKind::DropTemps(..)
|
||||
| ExprKind::Err
|
||||
| ExprKind::Field(..)
|
||||
| ExprKind::Index(..)
|
||||
| ExprKind::InlineAsm(..)
|
||||
| ExprKind::Let(..)
|
||||
| ExprKind::Lit(..)
|
||||
| ExprKind::Path(..)
|
||||
| ExprKind::Repeat(..)
|
||||
| ExprKind::Ret(..)
|
||||
| ExprKind::Struct(..)
|
||||
| ExprKind::Tup(..)
|
||||
| ExprKind::Type(..)
|
||||
| ExprKind::Unary(..)
|
||||
| ExprKind::Yield(..) => intravisit::walk_expr(self, expr),
|
||||
}
|
||||
|
||||
self.expr_index = self.expr_index + 1;
|
||||
self.drop_ranges.add_node_mapping(expr.hir_id, self.expr_index);
|
||||
self.consume_expr(expr);
|
||||
if let Some(expr) = reinit {
|
||||
self.reinit_expr(expr);
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_pat(&mut self, pat: &'tcx hir::Pat<'tcx>) {
|
||||
intravisit::walk_pat(self, pat);
|
||||
|
||||
// Increment expr_count here to match what InteriorVisitor expects.
|
||||
self.expr_index = self.expr_index + 1;
|
||||
}
|
||||
}
|
||||
|
||||
impl DropRangesBuilder {
|
||||
fn new(
|
||||
tracked_values: impl Iterator<Item = TrackedValue>,
|
||||
hir: Map<'_>,
|
||||
num_exprs: usize,
|
||||
) -> Self {
|
||||
let mut tracked_value_map = FxHashMap::<_, TrackedValueIndex>::default();
|
||||
let mut next = <_>::from(0u32);
|
||||
for value in tracked_values {
|
||||
for_each_consumable(hir, value, |value| {
|
||||
if !tracked_value_map.contains_key(&value) {
|
||||
tracked_value_map.insert(value, next);
|
||||
next = next + 1;
|
||||
}
|
||||
});
|
||||
}
|
||||
debug!("hir_id_map: {:?}", tracked_value_map);
|
||||
let num_values = tracked_value_map.len();
|
||||
Self {
|
||||
tracked_value_map,
|
||||
nodes: IndexVec::from_fn_n(|_| NodeInfo::new(num_values), num_exprs + 1),
|
||||
deferred_edges: <_>::default(),
|
||||
post_order_map: <_>::default(),
|
||||
}
|
||||
}
|
||||
|
||||
fn tracked_value_index(&self, tracked_value: TrackedValue) -> TrackedValueIndex {
|
||||
*self.tracked_value_map.get(&tracked_value).unwrap()
|
||||
}
|
||||
|
||||
/// Adds an entry in the mapping from HirIds to PostOrderIds
|
||||
///
|
||||
/// Needed so that `add_control_edge_hir_id` can work.
|
||||
fn add_node_mapping(&mut self, node_hir_id: HirId, post_order_id: PostOrderId) {
|
||||
self.post_order_map.insert(node_hir_id, post_order_id);
|
||||
}
|
||||
|
||||
/// Like add_control_edge, but uses a hir_id as the target.
|
||||
///
|
||||
/// This can be used for branches where we do not know the PostOrderId of the target yet,
|
||||
/// such as when handling `break` or `continue`.
|
||||
fn add_control_edge_hir_id(&mut self, from: PostOrderId, to: HirId) {
|
||||
self.deferred_edges.push((from, to));
|
||||
}
|
||||
|
||||
fn drop_at(&mut self, value: TrackedValue, location: PostOrderId) {
|
||||
let value = self.tracked_value_index(value);
|
||||
self.node_mut(location.into()).drops.push(value);
|
||||
}
|
||||
|
||||
fn reinit_at(&mut self, value: TrackedValue, location: PostOrderId) {
|
||||
let value = match self.tracked_value_map.get(&value) {
|
||||
Some(value) => *value,
|
||||
// If there's no value, this is never consumed and therefore is never dropped. We can
|
||||
// ignore this.
|
||||
None => return,
|
||||
};
|
||||
self.node_mut(location.into()).reinits.push(value);
|
||||
}
|
||||
|
||||
/// Looks up PostOrderId for any control edges added by HirId and adds a proper edge for them.
|
||||
///
|
||||
/// Should be called after visiting the HIR but before solving the control flow, otherwise some
|
||||
/// edges will be missed.
|
||||
fn process_deferred_edges(&mut self) {
|
||||
let mut edges = vec![];
|
||||
swap(&mut edges, &mut self.deferred_edges);
|
||||
edges.into_iter().for_each(|(from, to)| {
|
||||
let to = *self.post_order_map.get(&to).expect("Expression ID not found");
|
||||
trace!("Adding deferred edge from {:?} to {:?}", from, to);
|
||||
self.add_control_edge(from, to)
|
||||
});
|
||||
}
|
||||
}

@ -0,0 +1,92 @@
use super::{DropRangesBuilder, PostOrderId};
use rustc_index::{bit_set::BitSet, vec::IndexVec};
use std::collections::BTreeMap;

impl DropRangesBuilder {
    pub fn propagate_to_fixpoint(&mut self) {
        trace!("before fixpoint: {:#?}", self);
        let preds = self.compute_predecessors();

        trace!("predecessors: {:#?}", preds.iter_enumerated().collect::<BTreeMap<_, _>>());

        let mut new_state = BitSet::new_empty(self.num_values());
        let mut changed_nodes = BitSet::new_empty(self.nodes.len());
        let mut unchanged_mask = BitSet::new_filled(self.nodes.len());
        changed_nodes.insert(0u32.into());

        let mut propagate = || {
            let mut changed = false;
            unchanged_mask.insert_all();
            for id in self.nodes.indices() {
                trace!("processing {:?}, changed_nodes: {:?}", id, changed_nodes);
                // Check if any predecessor has changed, and if not then short-circuit.
                //
                // We handle the start node specially, since it doesn't have any predecessors,
                // but we need to start somewhere.
                if match id.index() {
                    0 => !changed_nodes.contains(id),
                    _ => !preds[id].iter().any(|pred| changed_nodes.contains(*pred)),
                } {
                    trace!("short-circuiting because none of {:?} have changed", preds[id]);
                    unchanged_mask.remove(id);
                    continue;
                }

                if id.index() == 0 {
                    new_state.clear();
                } else {
                    // If we are not the start node and we have no predecessors, treat
                    // everything as dropped because there's no way to get here anyway.
                    new_state.insert_all();
                };

                for pred in &preds[id] {
                    new_state.intersect(&self.nodes[*pred].drop_state);
                }

                for drop in &self.nodes[id].drops {
                    new_state.insert(*drop);
                }

                for reinit in &self.nodes[id].reinits {
                    new_state.remove(*reinit);
                }

                if self.nodes[id].drop_state.intersect(&new_state) {
                    changed_nodes.insert(id);
                    changed = true;
                } else {
                    unchanged_mask.remove(id);
                }
            }

            changed_nodes.intersect(&unchanged_mask);
            changed
        };

        while propagate() {
            trace!("drop_state changed, re-running propagation");
        }

        trace!("after fixpoint: {:#?}", self);
    }
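
    // A minimal sketch of the same intersection-based fixpoint, assuming at most 64
    // tracked values so a `u64` bitmask can stand in for `BitSet`. It omits the
    // `changed_nodes`/`unchanged_mask` short-circuiting used above; all names here
    // are hypothetical and not part of this commit.
    #[allow(dead_code)]
    mod fixpoint_sketch {
        /// One CFG node in post-order; the slice index is the node id and the
        /// entry node is id 0.
        pub struct Node {
            pub preds: Vec<usize>,
            pub drops: u64,   // bit i set: tracked value i is dropped at this node
            pub reinits: u64, // bit i set: tracked value i is re-initialized at this node
        }

        /// For each node, computes a bitmask of the values dropped on *every* path to it.
        pub fn propagate_to_fixpoint(nodes: &[Node]) -> Vec<u64> {
            // Start at "top" (everything dropped). Each pass only removes bits, so the
            // loop terminates.
            let mut drop_state = vec![!0u64; nodes.len()];
            let mut changed = true;
            while changed {
                changed = false;
                for id in 0..nodes.len() {
                    // Meet over predecessors: a value counts as dropped here only if it
                    // is dropped on every incoming edge. The entry node starts empty;
                    // a node with no predecessors at all stays at "everything dropped".
                    let mut new_state = if id == 0 { 0 } else { !0u64 };
                    for &p in &nodes[id].preds {
                        new_state &= drop_state[p];
                    }
                    // Local effects: drops add values, reinits remove them again.
                    new_state |= nodes[id].drops;
                    new_state &= !nodes[id].reinits;
                    let next = drop_state[id] & new_state;
                    if next != drop_state[id] {
                        drop_state[id] = next;
                        changed = true;
                    }
                }
            }
            drop_state
        }
    }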

    fn compute_predecessors(&self) -> IndexVec<PostOrderId, Vec<PostOrderId>> {
        let mut preds = IndexVec::from_fn_n(|_| vec![], self.nodes.len());
        for (id, node) in self.nodes.iter_enumerated() {
            // If the node has no explicit successors, we assume that control
            // will flow from this node into the next one.
            //
            // If there are successors listed, then we assume that all
            // possible successors are given and we do not include the default.
            if node.successors.len() == 0 && id.index() != self.nodes.len() - 1 {
                preds[id + 1].push(id);
            } else {
                for succ in &node.successors {
                    preds[*succ].push(id);
                }
            }
        }
        preds
    }
}
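
// A minimal sketch of the fall-through convention used by `compute_predecessors`,
// over plain `Vec`s: a node with no explicit successors implicitly falls through to
// the next post-order id, otherwise only the listed successors are used. The names
// are hypothetical and separate from the code in this commit.
#[allow(dead_code)]
mod predecessors_sketch {
    /// `successors[i]` lists the explicit successors of node `i`.
    pub fn compute_predecessors(successors: &[Vec<usize>]) -> Vec<Vec<usize>> {
        let n = successors.len();
        let mut preds = vec![Vec::new(); n];
        for id in 0..n {
            if successors[id].is_empty() && id != n - 1 {
                // Implicit fall-through edge to the next node.
                preds[id + 1].push(id);
            } else {
                for &succ in &successors[id] {
                    preds[succ].push(id);
                }
            }
        }
        preds
    }
}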

@ -0,0 +1,77 @@
//! Implementation of GraphWalk for DropRanges so we can visualize the control
//! flow graph when needed for debugging.

use rustc_graphviz as dot;

use super::{DropRangesBuilder, PostOrderId};

/// Writes the CFG for DropRangesBuilder to a .dot file for visualization.
///
/// It is not normally called, but is kept around to easily add debugging
/// code when needed.
#[allow(dead_code)]
pub(super) fn write_graph_to_file(drop_ranges: &DropRangesBuilder, filename: &str) {
    dot::render(drop_ranges, &mut std::fs::File::create(filename).unwrap()).unwrap();
}

impl<'a> dot::GraphWalk<'a> for DropRangesBuilder {
    type Node = PostOrderId;

    type Edge = (PostOrderId, PostOrderId);

    fn nodes(&'a self) -> dot::Nodes<'a, Self::Node> {
        self.nodes.iter_enumerated().map(|(i, _)| i).collect()
    }

    fn edges(&'a self) -> dot::Edges<'a, Self::Edge> {
        self.nodes
            .iter_enumerated()
            .flat_map(|(i, node)| {
                if node.successors.len() == 0 {
                    vec![(i, i + 1)]
                } else {
                    node.successors.iter().map(move |&s| (i, s)).collect()
                }
            })
            .collect()
    }

    fn source(&'a self, edge: &Self::Edge) -> Self::Node {
        edge.0
    }

    fn target(&'a self, edge: &Self::Edge) -> Self::Node {
        edge.1
    }
}

impl<'a> dot::Labeller<'a> for DropRangesBuilder {
    type Node = PostOrderId;

    type Edge = (PostOrderId, PostOrderId);

    fn graph_id(&'a self) -> dot::Id<'a> {
        dot::Id::new("drop_ranges").unwrap()
    }

    fn node_id(&'a self, n: &Self::Node) -> dot::Id<'a> {
        dot::Id::new(format!("id{}", n.index())).unwrap()
    }

    fn node_label(&'a self, n: &Self::Node) -> dot::LabelText<'a> {
        dot::LabelText::LabelStr(
            format!(
                "{:?}, local_id: {}",
                n,
                self.post_order_map
                    .iter()
                    .find(|(_hir_id, &post_order_id)| post_order_id == *n)
                    .map_or("<unknown>".into(), |(hir_id, _)| format!(
                        "{}",
                        hir_id.local_id.index()
                    ))
            )
            .into(),
        )
    }
}
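
// A minimal sketch, using std only, of roughly the DOT text this visualizer emits
// (node names follow the `id{index}` scheme from the `Labeller` impl above).
// `write_dot` is a hypothetical helper and not part of the visualizer itself;
// `write_graph_to_file` is meant to be called ad hoc from debugging code, so the
// exact call site is up to whoever is debugging.
#[allow(dead_code)]
mod dot_sketch {
    use std::io::{self, Write};

    /// Writes a graph as `digraph { id0 -> id1; ... }` given its edge list.
    pub fn write_dot(edges: &[(usize, usize)], out: &mut impl Write) -> io::Result<()> {
        writeln!(out, "digraph drop_ranges {{")?;
        for (from, to) in edges {
            writeln!(out, "    id{} -> id{};", from, to)?;
        }
        writeln!(out, "}}")
    }
}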

@ -0,0 +1,118 @@
use super::TrackedValue;
use crate::{
    check::FnCtxt,
    expr_use_visitor::{self, ExprUseVisitor},
};
use hir::{def_id::DefId, Body, HirId, HirIdMap};
use rustc_data_structures::stable_set::FxHashSet;
use rustc_hir as hir;
use rustc_middle::hir::map::Map;

pub(super) fn find_consumed_and_borrowed<'a, 'tcx>(
    fcx: &'a FnCtxt<'a, 'tcx>,
    def_id: DefId,
    body: &'tcx Body<'tcx>,
) -> ConsumedAndBorrowedPlaces {
    let mut expr_use_visitor = ExprUseDelegate::new(fcx.tcx.hir());
    expr_use_visitor.consume_body(fcx, def_id, body);
    expr_use_visitor.places
}

pub(super) struct ConsumedAndBorrowedPlaces {
    /// Records the variables/expressions that are dropped by a given expression.
    ///
    /// The key is the hir-id of the expression, and the value is a set of hir-ids for variables
    /// or values that are consumed by that expression.
    ///
    /// Note that this set excludes "partial drops" -- for example, a statement like `drop(x.y)` is
    /// not considered a drop of `x`, although it would be a drop of `x.y`.
    pub(super) consumed: HirIdMap<FxHashSet<TrackedValue>>,
    /// A set of hir-ids of values or variables that are borrowed at some point within the body.
    pub(super) borrowed: FxHashSet<TrackedValue>,
}

/// Works with ExprUseVisitor to find interesting values for the drop range analysis.
///
/// Interesting values are those that are either dropped or borrowed. For dropped values, we also
/// record the parent expression, which is the point where the drop actually takes place.
struct ExprUseDelegate<'tcx> {
    hir: Map<'tcx>,
    places: ConsumedAndBorrowedPlaces,
}

impl<'tcx> ExprUseDelegate<'tcx> {
    fn new(hir: Map<'tcx>) -> Self {
        Self {
            hir,
            places: ConsumedAndBorrowedPlaces {
                consumed: <_>::default(),
                borrowed: <_>::default(),
            },
        }
    }

    fn consume_body(&mut self, fcx: &'_ FnCtxt<'_, 'tcx>, def_id: DefId, body: &'tcx Body<'tcx>) {
        // Run ExprUseVisitor to find where values are consumed.
        ExprUseVisitor::new(
            self,
            &fcx.infcx,
            def_id.expect_local(),
            fcx.param_env,
            &fcx.typeck_results.borrow(),
        )
        .consume_body(body);
    }

    fn mark_consumed(&mut self, consumer: HirId, target: TrackedValue) {
        if !self.places.consumed.contains_key(&consumer) {
            self.places.consumed.insert(consumer, <_>::default());
        }
        self.places.consumed.get_mut(&consumer).map(|places| places.insert(target));
    }
}

impl<'tcx> expr_use_visitor::Delegate<'tcx> for ExprUseDelegate<'tcx> {
    fn consume(
        &mut self,
        place_with_id: &expr_use_visitor::PlaceWithHirId<'tcx>,
        diag_expr_id: HirId,
    ) {
        let parent = match self.hir.find_parent_node(place_with_id.hir_id) {
            Some(parent) => parent,
            None => place_with_id.hir_id,
        };
        debug!(
            "consume {:?}; diag_expr_id={:?}, using parent {:?}",
            place_with_id, diag_expr_id, parent
        );
        place_with_id
            .try_into()
            .map_or((), |tracked_value| self.mark_consumed(parent, tracked_value));
    }

    fn borrow(
        &mut self,
        place_with_id: &expr_use_visitor::PlaceWithHirId<'tcx>,
        _diag_expr_id: HirId,
        _bk: rustc_middle::ty::BorrowKind,
    ) {
        place_with_id
            .try_into()
            .map_or(false, |tracked_value| self.places.borrowed.insert(tracked_value));
    }

    fn mutate(
        &mut self,
        _assignee_place: &expr_use_visitor::PlaceWithHirId<'tcx>,
        _diag_expr_id: HirId,
    ) {
    }

    fn fake_read(
        &mut self,
        _place: expr_use_visitor::Place<'tcx>,
        _cause: rustc_middle::mir::FakeReadCause,
        _diag_expr_id: HirId,
    ) {
    }
}
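
// A minimal sketch of the bookkeeping `ExprUseDelegate` performs, assuming plain std
// collections; `Consumer` and `Value` are hypothetical stand-ins for `HirId` and
// `TrackedValue` and are not part of this commit. As a design note, `mark_consumed`
// above could equivalently use the `entry` API, which is what this sketch does.
#[allow(dead_code)]
mod consumed_borrowed_sketch {
    use std::collections::{HashMap, HashSet};

    type Consumer = u64;
    type Value = u64;

    #[derive(Default)]
    pub struct ConsumedAndBorrowed {
        /// For each consuming expression, the values it consumes (and therefore drops).
        pub consumed: HashMap<Consumer, HashSet<Value>>,
        /// Values that are borrowed anywhere in the body.
        pub borrowed: HashSet<Value>,
    }

    impl ConsumedAndBorrowed {
        pub fn mark_consumed(&mut self, consumer: Consumer, target: Value) {
            // `entry` inserts an empty set on first use, then records the value.
            self.consumed.entry(consumer).or_default().insert(target);
        }

        pub fn mark_borrowed(&mut self, target: Value) {
            self.borrowed.insert(target);
        }
    }
}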

@ -43,7 +43,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        let item_def_id = self.tcx.hir().local_def_id(item_id);

        // This attribute causes us to dump some writeback information
        // in the form of errors, which is uSymbol for unit tests.
        // in the form of errors, which is used for unit tests.
        let rustc_dump_user_substs =
            self.tcx.has_attr(item_def_id.to_def_id(), sym::rustc_dump_user_substs);