
Auto merge of #98335 - JohnTitor:rollup-j2zudxv, r=JohnTitor

Rollup of 11 pull requests

Successful merges:

 - #94033 (Improve docs for `is_running` to explain use case)
 - #97269 (adjust transmute const stabilization version)
 - #97805 (Add proper tracing spans to rustc_trait_selection::traits::error_reporting)
 - #98022 (Fix erroneous span for borrowck error)
 - #98124 (Improve loading of crates.js and sidebar-items.js)
 - #98278 (Some token stream cleanups)
 - #98306 (`try_fold_unevaluated` for infallible folders)
 - #98313 (Remove lies in comments.)
 - #98323 (⬆️ rust-analyzer)
 - #98329 (Avoid an ICE and instead let the compiler report a useful error)
 - #98330 (update ioslice docs to use shared slices)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2022-06-21 13:41:37 +00:00
commit 72fd41a8b4
32 changed files with 359 additions and 334 deletions

View file

@ -25,7 +25,7 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use rustc_span::{Span, DUMMY_SP};
use smallvec::{smallvec, SmallVec};
use std::{fmt, iter, mem};
use std::{fmt, iter};
/// When the main Rust parser encounters a syntax-extension invocation, it
/// parses the arguments to the invocation as a token tree. This is a very
@ -399,45 +399,6 @@ impl TokenStream {
self.0.len()
}
pub fn from_streams(mut streams: SmallVec<[TokenStream; 2]>) -> TokenStream {
match streams.len() {
0 => TokenStream::default(),
1 => streams.pop().unwrap(),
_ => {
// We are going to extend the first stream in `streams` with
// the elements from the subsequent streams. This requires
// using `make_mut()` on the first stream, and in practice this
// doesn't cause cloning 99.9% of the time.
//
// One very common use case is when `streams` has two elements,
// where the first stream has any number of elements within
// (often 1, but sometimes many more) and the second stream has
// a single element within.
// Determine how much the first stream will be extended.
// Needed to avoid quadratic blow up from on-the-fly
// reallocations (#57735).
let num_appends = streams.iter().skip(1).map(|ts| ts.len()).sum();
// Get the first stream. If it's `None`, create an empty
// stream.
let mut iter = streams.drain(..);
let mut first_stream_lrc = iter.next().unwrap().0;
// Append the elements to the first stream, after reserving
// space for them.
let first_vec_mut = Lrc::make_mut(&mut first_stream_lrc);
first_vec_mut.reserve(num_appends);
for stream in iter {
first_vec_mut.extend(stream.0.iter().cloned());
}
// Create the final `TokenStream`.
TokenStream(first_stream_lrc)
}
}
}
pub fn trees(&self) -> CursorRef<'_> {
CursorRef::new(self)
}
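
The removed `from_streams` (its body reappears in `TokenStreamBuilder::build` below) pre-computes `num_appends` and calls `reserve` once to avoid the quadratic reallocation blow-up tracked in #57735. A minimal standalone sketch of that pattern using plain `Vec`, not rustc's types:

```rust
// Why reserving up front matters: extending a Vec chunk-by-chunk can
// trigger repeated reallocations; one `reserve` for the known total
// does a single allocation.
fn concat_all(mut first: Vec<u32>, rest: Vec<Vec<u32>>) -> Vec<u32> {
    // Mirrors `num_appends`: total length of everything after the first.
    let total: usize = rest.iter().map(|v| v.len()).sum();
    first.reserve(total); // one allocation instead of many regrows
    for chunk in rest {
        first.extend(chunk);
    }
    first
}

fn main() {
    let out = concat_all(vec![1], vec![vec![2, 3], vec![4]]);
    assert_eq!(out, [1, 2, 3, 4]);
}
```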
@ -562,50 +523,65 @@ impl TokenStreamBuilder {
}
pub fn push<T: Into<TokenStream>>(&mut self, stream: T) {
let mut stream = stream.into();
// If `self` is not empty and the last tree within the last stream is a
// token tree marked with `Joint`...
if let Some(TokenStream(ref mut last_stream_lrc)) = self.0.last_mut()
&& let Some((TokenTree::Token(last_token), Spacing::Joint)) = last_stream_lrc.last()
// ...and `stream` is not empty and the first tree within it is
// a token tree...
&& let TokenStream(ref mut stream_lrc) = stream
&& let Some((TokenTree::Token(token), spacing)) = stream_lrc.first()
// ...and the two tokens can be glued together...
&& let Some(glued_tok) = last_token.glue(&token)
{
// ...then do so, by overwriting the last token
// tree in `self` and removing the first token tree
// from `stream`. This requires using `make_mut()`
// on the last stream in `self` and on `stream`,
// and in practice this doesn't cause cloning 99.9%
// of the time.
// Overwrite the last token tree with the merged
// token.
let last_vec_mut = Lrc::make_mut(last_stream_lrc);
*last_vec_mut.last_mut().unwrap() = (TokenTree::Token(glued_tok), *spacing);
// Remove the first token tree from `stream`. (This
// is almost always the only tree in `stream`.)
let stream_vec_mut = Lrc::make_mut(stream_lrc);
stream_vec_mut.remove(0);
// Don't push `stream` if it's empty -- that could
// block subsequent token gluing, by getting
// between two token trees that should be glued
// together.
if !stream.is_empty() {
self.0.push(stream);
}
return;
}
self.0.push(stream);
self.0.push(stream.into());
}
pub fn build(self) -> TokenStream {
TokenStream::from_streams(self.0)
let mut streams = self.0;
match streams.len() {
0 => TokenStream::default(),
1 => streams.pop().unwrap(),
_ => {
// We will extend the first stream in `streams` with the
// elements from the subsequent streams. This requires using
// `make_mut()` on the first stream, and in practice this
// doesn't cause cloning 99.9% of the time.
//
// One very common use case is when `streams` has two elements,
// where the first stream has any number of elements within
// (often 1, but sometimes many more) and the second stream has
// a single element within.
// Determine how much the first stream will be extended.
// Needed to avoid quadratic blow up from on-the-fly
// reallocations (#57735).
let num_appends = streams.iter().skip(1).map(|ts| ts.len()).sum();
// Get the first stream, which will become the result stream.
// If it's `None`, create an empty stream.
let mut iter = streams.drain(..);
let mut res_stream_lrc = iter.next().unwrap().0;
// Append the subsequent elements to the result stream, after
// reserving space for them.
let res_vec_mut = Lrc::make_mut(&mut res_stream_lrc);
res_vec_mut.reserve(num_appends);
for stream in iter {
let stream_iter = stream.0.iter().cloned();
// If (a) `res_vec_mut` is not empty and the last tree
// within it is a token tree marked with `Joint`, and (b)
// `stream` is not empty and the first tree within it is a
// token tree, and (c) the two tokens can be glued
// together...
if let Some((TokenTree::Token(last_tok), Spacing::Joint)) = res_vec_mut.last()
&& let Some((TokenTree::Token(tok), spacing)) = stream.0.first()
&& let Some(glued_tok) = last_tok.glue(&tok)
{
// ...then overwrite the last token tree in
// `res_vec_mut` with the glued token, and skip the
// first token tree from `stream`.
*res_vec_mut.last_mut().unwrap() = (TokenTree::Token(glued_tok), *spacing);
res_vec_mut.extend(stream_iter.skip(1));
} else {
// Append all of `stream`.
res_vec_mut.extend(stream_iter);
}
}
TokenStream(res_stream_lrc)
}
}
}
}
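
Both the gluing fast path in `push` and the concatenation in `build` lean on `Lrc::make_mut`, which the comments note "doesn't cause cloning 99.9% of the time". `Lrc` is rustc's alias for `Arc` (or `Rc` in non-parallel builds), so the copy-on-write behaviour can be shown with plain `Arc`:

```rust
use std::sync::Arc;

fn main() {
    let mut a: Arc<Vec<i32>> = Arc::new(vec![1, 2]);

    // Sole owner: no clone of the Vec, mutation happens in place.
    Arc::make_mut(&mut a).push(3);

    // Shared: `make_mut` clones the Vec first, so `b` keeps the old data.
    let b = Arc::clone(&a);
    Arc::make_mut(&mut a).push(4);
    assert_eq!(*a, [1, 2, 3, 4]);
    assert_eq!(*b, [1, 2, 3]);
}
```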
@ -679,20 +655,6 @@ impl Cursor {
})
}
pub fn index(&self) -> usize {
self.index
}
pub fn append(&mut self, new_stream: TokenStream) {
if new_stream.is_empty() {
return;
}
let index = self.index;
let stream = mem::take(&mut self.stream);
*self = TokenStream::from_streams(smallvec![stream, new_stream]).into_trees();
self.index = index;
}
pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
self.stream.0[self.index..].get(n).map(|(tree, _)| tree)
}
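
The `Joint`-spacing logic above merges adjacent punctuation tokens with `Token::glue`. A toy model of the idea (a handful of invented pairs, not rustc's actual glue table):

```rust
// Two adjacent punctuation tokens marked `Joint` may fuse into one
// multi-character operator token.
fn glue(a: char, b: char) -> Option<&'static str> {
    match (a, b) {
        ('&', '&') => Some("&&"),
        ('|', '|') => Some("||"),
        ('=', '=') => Some("=="),
        ('<', '=') => Some("<="),
        _ => None, // most adjacent pairs stay separate tokens
    }
}

fn main() {
    assert_eq!(glue('&', '&'), Some("&&"));
    assert_eq!(glue('+', ';'), None);
}
```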

View file

@ -357,12 +357,20 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
.add_element(live_region_vid, location);
});
// HACK(compiler-errors): Constants that are gathered into Body.required_consts
// have their locations erased...
let locations = if location != Location::START {
location.to_locations()
} else {
Locations::All(constant.span)
};
if let Some(annotation_index) = constant.user_ty {
if let Err(terr) = self.cx.relate_type_and_user_type(
constant.literal.ty(),
ty::Variance::Invariant,
&UserTypeProjection { base: annotation_index, projs: vec![] },
location.to_locations(),
locations,
ConstraintCategory::Boring,
) {
let annotation = &self.cx.user_type_annotations[annotation_index];
@ -390,12 +398,9 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
promoted: &Body<'tcx>,
ty,
san_ty| {
if let Err(terr) = verifier.cx.eq_types(
ty,
san_ty,
location.to_locations(),
ConstraintCategory::Boring,
) {
if let Err(terr) =
verifier.cx.eq_types(ty, san_ty, locations, ConstraintCategory::Boring)
{
span_mirbug!(
verifier,
promoted,
@ -416,7 +421,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
}
} else {
if let Err(terr) = self.cx.fully_perform_op(
location.to_locations(),
locations,
ConstraintCategory::Boring,
self.cx.param_env.and(type_op::ascribe_user_type::AscribeUserType::new(
constant.literal.ty(),
@ -435,7 +440,6 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
}
} else if let Some(static_def_id) = constant.check_static_ptr(tcx) {
let unnormalized_ty = tcx.type_of(static_def_id);
let locations = location.to_locations();
let normalized_ty = self.cx.normalize(unnormalized_ty, locations);
let literal_ty = constant.literal.ty().builtin_deref(true).unwrap().ty;
@ -454,7 +458,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
self.cx.normalize_and_prove_instantiated_predicates(
def_id,
instantiated_predicates,
location.to_locations(),
locations,
);
}
}
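
A hypothetical, much-simplified shape of this fix (all types invented, not rustc's API): when the constant's statement location was erased by `required_consts` gathering, fall back to span-based locations so the diagnostic still points at real source:

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
struct Location(u32);
const START: Location = Location(0);

#[derive(Debug)]
struct Span(u32, u32);

#[derive(Debug)]
enum Locations {
    Single(Location),
    All(Span),
}

fn locations_for(location: Location, span: Span) -> Locations {
    if location != START {
        Locations::Single(location) // a real location survived
    } else {
        Locations::All(span) // erased; fall back to the constant's span
    }
}

fn main() {
    assert!(matches!(locations_for(Location(7), Span(0, 5)), Locations::Single(_)));
    assert!(matches!(locations_for(START, Span(0, 5)), Locations::All(_)));
}
```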

View file

@ -329,6 +329,7 @@ impl Ident {
sess.symbol_gallery.insert(sym, span);
Ident { sym, is_raw, span }
}
fn dollar_crate(span: Span) -> Ident {
// `$crate` is accepted as an ident only if it comes from the compiler.
Ident { sym: kw::DollarCrate, is_raw: false, span }
@ -403,6 +404,7 @@ impl server::TokenStream for Rustc<'_, '_> {
fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
stream.is_empty()
}
fn from_str(&mut self, src: &str) -> Self::TokenStream {
parse_stream_from_source_str(
FileName::proc_macro_source_code(src),
@ -411,9 +413,11 @@ impl server::TokenStream for Rustc<'_, '_> {
Some(self.call_site),
)
}
fn to_string(&mut self, stream: &Self::TokenStream) -> String {
pprust::tts_to_string(stream)
}
fn expand_expr(&mut self, stream: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
// Parse the expression from our tokenstream.
let expr: PResult<'_, _> = try {
@ -464,12 +468,14 @@ impl server::TokenStream for Rustc<'_, '_> {
_ => Err(()),
}
}
fn from_token_tree(
&mut self,
tree: TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>,
) -> Self::TokenStream {
tree.to_internal()
}
fn concat_trees(
&mut self,
base: Option<Self::TokenStream>,
@ -484,6 +490,7 @@ impl server::TokenStream for Rustc<'_, '_> {
}
builder.build()
}
fn concat_streams(
&mut self,
base: Option<Self::TokenStream>,
@ -498,6 +505,7 @@ impl server::TokenStream for Rustc<'_, '_> {
}
builder.build()
}
fn into_trees(
&mut self,
stream: Self::TokenStream,
@ -522,10 +530,10 @@ impl server::TokenStream for Rustc<'_, '_> {
// FIXME: It needs to be removed, but there are some
// compatibility issues (see #73345).
if group.flatten {
cursor.append(group.stream);
continue;
tts.append(&mut self.into_trees(group.stream));
} else {
tts.push(TokenTree::Group(group));
}
tts.push(TokenTree::Group(group));
}
Some(tt) => tts.push(tt),
None => return tts,
@ -543,21 +551,27 @@ impl server::Group for Rustc<'_, '_> {
flatten: false,
}
}
fn delimiter(&mut self, group: &Self::Group) -> Delimiter {
group.delimiter
}
fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
group.stream.clone()
}
fn span(&mut self, group: &Self::Group) -> Self::Span {
group.span.entire()
}
fn span_open(&mut self, group: &Self::Group) -> Self::Span {
group.span.open
}
fn span_close(&mut self, group: &Self::Group) -> Self::Span {
group.span.close
}
fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) {
group.span = DelimSpan::from_single(span);
}
@ -567,15 +581,19 @@ impl server::Punct for Rustc<'_, '_> {
fn new(&mut self, ch: char, spacing: Spacing) -> Self::Punct {
Punct::new(ch, spacing == Spacing::Joint, server::Span::call_site(self))
}
fn as_char(&mut self, punct: Self::Punct) -> char {
punct.ch
}
fn spacing(&mut self, punct: Self::Punct) -> Spacing {
if punct.joint { Spacing::Joint } else { Spacing::Alone }
}
fn span(&mut self, punct: Self::Punct) -> Self::Span {
punct.span
}
fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct {
Punct { span, ..punct }
}
@ -585,9 +603,11 @@ impl server::Ident for Rustc<'_, '_> {
fn new(&mut self, string: &str, span: Self::Span, is_raw: bool) -> Self::Ident {
Ident::new(self.sess(), Symbol::intern(string), is_raw, span)
}
fn span(&mut self, ident: Self::Ident) -> Self::Span {
ident.span
}
fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
Ident { span, ..ident }
}
@ -639,45 +659,57 @@ impl server::Literal for Rustc<'_, '_> {
Ok(Literal { lit, span: self.call_site })
}
fn to_string(&mut self, literal: &Self::Literal) -> String {
literal.lit.to_string()
}
fn debug_kind(&mut self, literal: &Self::Literal) -> String {
format!("{:?}", literal.lit.kind)
}
fn symbol(&mut self, literal: &Self::Literal) -> String {
literal.lit.symbol.to_string()
}
fn suffix(&mut self, literal: &Self::Literal) -> Option<String> {
literal.lit.suffix.as_ref().map(Symbol::to_string)
}
fn integer(&mut self, n: &str) -> Self::Literal {
self.lit(token::Integer, Symbol::intern(n), None)
}
fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
self.lit(token::Integer, Symbol::intern(n), Some(Symbol::intern(kind)))
}
fn float(&mut self, n: &str) -> Self::Literal {
self.lit(token::Float, Symbol::intern(n), None)
}
fn f32(&mut self, n: &str) -> Self::Literal {
self.lit(token::Float, Symbol::intern(n), Some(sym::f32))
}
fn f64(&mut self, n: &str) -> Self::Literal {
self.lit(token::Float, Symbol::intern(n), Some(sym::f64))
}
fn string(&mut self, string: &str) -> Self::Literal {
let quoted = format!("{:?}", string);
assert!(quoted.starts_with('"') && quoted.ends_with('"'));
let symbol = &quoted[1..quoted.len() - 1];
self.lit(token::Str, Symbol::intern(symbol), None)
}
fn character(&mut self, ch: char) -> Self::Literal {
let quoted = format!("{:?}", ch);
assert!(quoted.starts_with('\'') && quoted.ends_with('\''));
let symbol = &quoted[1..quoted.len() - 1];
self.lit(token::Char, Symbol::intern(symbol), None)
}
fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
let string = bytes
.iter()
@ -687,12 +719,15 @@ impl server::Literal for Rustc<'_, '_> {
.collect::<String>();
self.lit(token::ByteStr, Symbol::intern(&string), None)
}
fn span(&mut self, literal: &Self::Literal) -> Self::Span {
literal.span
}
fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
literal.span = span;
}
fn subspan(
&mut self,
literal: &Self::Literal,
@ -735,6 +770,7 @@ impl server::SourceFile for Rustc<'_, '_> {
fn eq(&mut self, file1: &Self::SourceFile, file2: &Self::SourceFile) -> bool {
Lrc::ptr_eq(file1, file2)
}
fn path(&mut self, file: &Self::SourceFile) -> String {
match file.name {
FileName::Real(ref name) => name
@ -746,6 +782,7 @@ impl server::SourceFile for Rustc<'_, '_> {
_ => file.name.prefer_local().to_string(),
}
}
fn is_real(&mut self, file: &Self::SourceFile) -> bool {
file.is_real_file()
}
@ -755,6 +792,7 @@ impl server::MultiSpan for Rustc<'_, '_> {
fn new(&mut self) -> Self::MultiSpan {
vec![]
}
fn push(&mut self, spans: &mut Self::MultiSpan, span: Self::Span) {
spans.push(span)
}
@ -766,6 +804,7 @@ impl server::Diagnostic for Rustc<'_, '_> {
diag.set_span(MultiSpan::from_spans(spans));
diag
}
fn sub(
&mut self,
diag: &mut Self::Diagnostic,
@ -775,6 +814,7 @@ impl server::Diagnostic for Rustc<'_, '_> {
) {
diag.sub(level.to_internal(), msg, MultiSpan::from_spans(spans), None);
}
fn emit(&mut self, mut diag: Self::Diagnostic) {
self.sess().span_diagnostic.emit_diagnostic(&mut diag);
}
@ -788,38 +828,49 @@ impl server::Span for Rustc<'_, '_> {
format!("{:?} bytes({}..{})", span.ctxt(), span.lo().0, span.hi().0)
}
}
fn def_site(&mut self) -> Self::Span {
self.def_site
}
fn call_site(&mut self) -> Self::Span {
self.call_site
}
fn mixed_site(&mut self) -> Self::Span {
self.mixed_site
}
fn source_file(&mut self, span: Self::Span) -> Self::SourceFile {
self.sess().source_map().lookup_char_pos(span.lo()).file
}
fn parent(&mut self, span: Self::Span) -> Option<Self::Span> {
span.parent_callsite()
}
fn source(&mut self, span: Self::Span) -> Self::Span {
span.source_callsite()
}
fn start(&mut self, span: Self::Span) -> LineColumn {
let loc = self.sess().source_map().lookup_char_pos(span.lo());
LineColumn { line: loc.line, column: loc.col.to_usize() }
}
fn end(&mut self, span: Self::Span) -> LineColumn {
let loc = self.sess().source_map().lookup_char_pos(span.hi());
LineColumn { line: loc.line, column: loc.col.to_usize() }
}
fn before(&mut self, span: Self::Span) -> Self::Span {
span.shrink_to_lo()
}
fn after(&mut self, span: Self::Span) -> Self::Span {
span.shrink_to_hi()
}
fn join(&mut self, first: Self::Span, second: Self::Span) -> Option<Self::Span> {
let self_loc = self.sess().source_map().lookup_char_pos(first.lo());
let other_loc = self.sess().source_map().lookup_char_pos(second.lo());
@ -830,9 +881,11 @@ impl server::Span for Rustc<'_, '_> {
Some(first.to(second))
}
fn resolved_at(&mut self, span: Self::Span, at: Self::Span) -> Self::Span {
span.with_ctxt(at.ctxt())
}
fn source_text(&mut self, span: Self::Span) -> Option<String> {
self.sess().source_map().span_to_snippet(span).ok()
}
@ -863,6 +916,7 @@ impl server::Span for Rustc<'_, '_> {
fn save_span(&mut self, span: Self::Span) -> usize {
self.sess().save_proc_macro_span(span)
}
fn recover_proc_macro_span(&mut self, id: usize) -> Self::Span {
let (resolver, krate, def_site) = (&*self.ecx.resolver, self.krate, self.def_site);
*self.rebased_spans.entry(id).or_insert_with(|| {

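The `into_trees` change above replaces `cursor.append` (which needed the now-removed `TokenStream::from_streams`) with recursion into the flattened group's stream. A toy model with an invented tree type:

```rust
enum Tree {
    Token(char),
    Group { flatten: bool, stream: Vec<Tree> },
}

fn into_trees(stream: Vec<Tree>) -> Vec<char> {
    let mut tts = Vec::new();
    for tree in stream {
        match tree {
            Tree::Token(c) => tts.push(c),
            // The fix: recurse into flattened groups rather than splicing
            // their stream back into the cursor being iterated.
            Tree::Group { flatten: true, stream } => {
                tts.append(&mut into_trees(stream));
            }
            Tree::Group { flatten: false, stream } => {
                tts.push('(');
                tts.append(&mut into_trees(stream));
                tts.push(')');
            }
        }
    }
    tts
}

fn main() {
    let ts = vec![
        Tree::Token('a'),
        Tree::Group { flatten: true, stream: vec![Tree::Token('b')] },
        Tree::Group { flatten: false, stream: vec![Tree::Token('c')] },
    ];
    assert_eq!(into_trees(ts), vec!['a', 'b', '(', 'c', ')']);
}
```
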
View file

@ -4,7 +4,6 @@ use rustc_ast::token;
use rustc_ast::tokenstream::{Spacing, TokenStream, TokenStreamBuilder, TokenTree};
use rustc_span::create_default_session_globals_then;
use rustc_span::{BytePos, Span, Symbol};
use smallvec::smallvec;
fn string_to_ts(string: &str) -> TokenStream {
string_to_stream(string.to_owned())
@ -24,7 +23,10 @@ fn test_concat() {
let test_res = string_to_ts("foo::bar::baz");
let test_fst = string_to_ts("foo::bar");
let test_snd = string_to_ts("::baz");
let eq_res = TokenStream::from_streams(smallvec![test_fst, test_snd]);
let mut builder = TokenStreamBuilder::new();
builder.push(test_fst);
builder.push(test_snd);
let eq_res = builder.build();
assert_eq!(test_res.trees().count(), 5);
assert_eq!(eq_res.trees().count(), 5);
assert_eq!(test_res.eq_unspanned(&eq_res), true);

View file

@ -662,7 +662,11 @@ where
match b.kind() {
ty::ConstKind::Infer(InferConst::Var(_)) if D::forbid_inference_vars() => {
// Forbid inference variables in the RHS.
bug!("unexpected inference var {:?}", b)
self.infcx.tcx.sess.delay_span_bug(
self.delegate.span(),
format!("unexpected inference var {:?}", b,),
);
Ok(a)
}
// FIXME(invariance): see the related FIXME above.
_ => self.infcx.super_combine_consts(self, a, b),

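This hunk swaps a hard `bug!` (an immediate ICE) for `delay_span_bug`, which only aborts if compilation would otherwise have succeeded. A standalone sketch of that pattern with an invented `Session` type, not rustc's:

```rust
#[derive(Default)]
struct Session {
    errors: u32,
    delayed_bugs: Vec<String>,
}

impl Session {
    // Record a suspected compiler bug instead of panicking on the spot.
    fn delay_span_bug(&mut self, msg: impl Into<String>) {
        self.delayed_bugs.push(msg.into());
    }

    // At the end of the session: if a real error was already emitted,
    // the delayed bug is moot; only ICE when compilation "succeeded".
    fn finish(self) {
        if self.errors == 0 && !self.delayed_bugs.is_empty() {
            panic!("delayed bugs with no errors: {:?}", self.delayed_bugs);
        }
    }
}

fn main() {
    let mut sess = Session::default();
    sess.errors = 1; // a useful user-facing error was reported
    sess.delay_span_bug("unexpected inference var");
    sess.finish(); // no ICE: the user sees the real error instead
}
```
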
View file

@ -371,6 +371,13 @@ where
Ok(self.fold_const(c))
}
fn try_fold_unevaluated(
&mut self,
c: ty::Unevaluated<'tcx>,
) -> Result<ty::Unevaluated<'tcx>, Self::Error> {
Ok(self.fold_unevaluated(c))
}
fn try_fold_predicate(
&mut self,
p: ty::Predicate<'tcx>,

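`try_fold_unevaluated` follows the crate's folder convention: the fallible `try_fold_*` methods are the primitives, and infallible folders get them for free by wrapping the plain `fold_*` methods in `Ok`. A self-contained sketch of that pattern with invented trait names:

```rust
use std::convert::Infallible;

trait FallibleFolder {
    type Error;
    fn try_fold_unevaluated(&mut self, c: u32) -> Result<u32, Self::Error>;
}

trait Folder {
    fn fold_unevaluated(&mut self, c: u32) -> u32;
}

// Every infallible folder is trivially a fallible one that never errors.
impl<F: Folder> FallibleFolder for F {
    type Error = Infallible;
    fn try_fold_unevaluated(&mut self, c: u32) -> Result<u32, Infallible> {
        Ok(self.fold_unevaluated(c))
    }
}

struct AddOne;
impl Folder for AddOne {
    fn fold_unevaluated(&mut self, c: u32) -> u32 {
        c + 1
    }
}

fn main() {
    assert_eq!(AddOne.try_fold_unevaluated(41), Ok(42));
}
```
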
View file

@ -259,7 +259,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
let cycle = self.resolve_vars_if_possible(cycle.to_owned());
assert!(!cycle.is_empty());
debug!("report_overflow_error_cycle: cycle={:?}", cycle);
debug!(?cycle, "report_overflow_error_cycle");
// The 'deepest' obligation is most likely to have a useful
// cause 'backtrace'
@ -1513,6 +1513,7 @@ impl<'a, 'tcx> InferCtxtPrivExt<'a, 'tcx> for InferCtxt<'a, 'tcx> {
}
}
#[instrument(level = "debug", skip_all)]
fn report_projection_error(
&self,
obligation: &PredicateObligation<'tcx>,
@ -1551,15 +1552,9 @@ impl<'a, 'tcx> InferCtxtPrivExt<'a, 'tcx> for InferCtxt<'a, 'tcx> {
&mut obligations,
);
debug!(
"report_projection_error obligation.cause={:?} obligation.param_env={:?}",
obligation.cause, obligation.param_env
);
debug!(?obligation.cause, ?obligation.param_env);
debug!(
"report_projection_error normalized_ty={:?} data.ty={:?}",
normalized_ty, data.term,
);
debug!(?normalized_ty, data.ty = ?data.term);
let is_normalized_ty_expected = !matches!(
obligation.cause.code().peel_derives(),
@ -2346,6 +2341,7 @@ impl<'a, 'tcx> InferCtxtPrivExt<'a, 'tcx> for InferCtxt<'a, 'tcx> {
}
}
#[instrument(level = "debug", skip_all)]
fn suggest_unsized_bound_if_applicable(
&self,
err: &mut Diagnostic,
@ -2360,10 +2356,7 @@ impl<'a, 'tcx> InferCtxtPrivExt<'a, 'tcx> for InferCtxt<'a, 'tcx> {
) else {
return;
};
debug!(
"suggest_unsized_bound_if_applicable: pred={:?} item_def_id={:?} span={:?}",
pred, item_def_id, span
);
debug!(?pred, ?item_def_id, ?span);
let (Some(node), true) = (
self.tcx.hir().get_if_local(item_def_id),
@ -2374,6 +2367,7 @@ impl<'a, 'tcx> InferCtxtPrivExt<'a, 'tcx> for InferCtxt<'a, 'tcx> {
self.maybe_suggest_unsized_generics(err, span, node);
}
#[instrument(level = "debug", skip_all)]
fn maybe_suggest_unsized_generics<'hir>(
&self,
err: &mut Diagnostic,
@ -2384,8 +2378,8 @@ impl<'a, 'tcx> InferCtxtPrivExt<'a, 'tcx> for InferCtxt<'a, 'tcx> {
return;
};
let sized_trait = self.tcx.lang_items().sized_trait();
debug!("maybe_suggest_unsized_generics: generics.params={:?}", generics.params);
debug!("maybe_suggest_unsized_generics: generics.predicates={:?}", generics.predicates);
debug!(?generics.params);
debug!(?generics.predicates);
let Some(param) = generics.params.iter().find(|param| param.span == span) else {
return;
};
@ -2399,7 +2393,7 @@ impl<'a, 'tcx> InferCtxtPrivExt<'a, 'tcx> for InferCtxt<'a, 'tcx> {
if explicitly_sized {
return;
}
debug!("maybe_suggest_unsized_generics: param={:?}", param);
debug!(?param);
match node {
hir::Node::Item(
item @ hir::Item {
@ -2517,7 +2511,7 @@ impl<'v> Visitor<'v> for FindTypeParam {
if path.segments.len() == 1 && path.segments[0].ident.name == self.param =>
{
if !self.nested {
debug!("FindTypeParam::visit_ty: ty={:?}", ty);
debug!(?ty, "FindTypeParam::visit_ty");
self.invalid_spans.push(ty.span);
}
}
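
The pattern across this file: hand-written `debug!("f: x={:?}", x)` format strings become structured fields (`debug!(?x)`), and entry logging becomes `#[instrument]` spans. A runnable sketch assuming the `tracing` and `tracing-subscriber` crates as dependencies (rustc wires these macros up through its own infrastructure):

```rust
use tracing::{debug, instrument};

// Opens a debug-level span per call; `skip_all` records no arguments
// automatically, so we choose what to log inside.
#[instrument(level = "debug", skip_all)]
fn report_projection_error(cause: &str, param_env: u32) {
    // Equivalent information to the old format-string debug!, but
    // emitted as named fields inside this function's span.
    debug!(?cause, ?param_env);
}

fn main() {
    tracing_subscriber::fmt().with_max_level(tracing::Level::DEBUG).init();
    report_projection_error("obligation cause", 7);
}
```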

View file

@ -1628,16 +1628,12 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
/// ```
///
/// Returns `true` if an async-await specific note was added to the diagnostic.
#[instrument(level = "debug", skip_all, fields(?obligation.predicate, ?obligation.cause.span))]
fn maybe_note_obligation_cause_for_async_await(
&self,
err: &mut Diagnostic,
obligation: &PredicateObligation<'tcx>,
) -> bool {
debug!(
"maybe_note_obligation_cause_for_async_await: obligation.predicate={:?} \
obligation.cause.span={:?}",
obligation.predicate, obligation.cause.span
);
let hir = self.tcx.hir();
// Attempt to detect an async-await error by looking at the obligation causes, looking
@ -1677,7 +1673,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
let mut seen_upvar_tys_infer_tuple = false;
while let Some(code) = next_code {
debug!("maybe_note_obligation_cause_for_async_await: code={:?}", code);
debug!(?code);
match code {
ObligationCauseCode::FunctionArgumentObligation { parent_code, .. } => {
next_code = Some(parent_code);
@ -1685,10 +1681,9 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
ObligationCauseCode::ImplDerivedObligation(cause) => {
let ty = cause.derived.parent_trait_pred.skip_binder().self_ty();
debug!(
"maybe_note_obligation_cause_for_async_await: ImplDerived \
parent_trait_ref={:?} self_ty.kind={:?}",
cause.derived.parent_trait_pred,
ty.kind()
parent_trait_ref = ?cause.derived.parent_trait_pred,
self_ty.kind = ?ty.kind(),
"ImplDerived",
);
match *ty.kind() {
@ -1717,10 +1712,8 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
| ObligationCauseCode::BuiltinDerivedObligation(derived_obligation) => {
let ty = derived_obligation.parent_trait_pred.skip_binder().self_ty();
debug!(
"maybe_note_obligation_cause_for_async_await: \
parent_trait_ref={:?} self_ty.kind={:?}",
derived_obligation.parent_trait_pred,
ty.kind()
parent_trait_ref = ?derived_obligation.parent_trait_pred,
self_ty.kind = ?ty.kind(),
);
match *ty.kind() {
@ -1750,7 +1743,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
}
// Only continue if a generator was found.
debug!(?generator, ?trait_ref, ?target_ty, "maybe_note_obligation_cause_for_async_await");
debug!(?generator, ?trait_ref, ?target_ty);
let (Some(generator_did), Some(trait_ref), Some(target_ty)) = (generator, trait_ref, target_ty) else {
return false;
};
@ -1760,12 +1753,10 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
let in_progress_typeck_results = self.in_progress_typeck_results.map(|t| t.borrow());
let generator_did_root = self.tcx.typeck_root_def_id(generator_did);
debug!(
"maybe_note_obligation_cause_for_async_await: generator_did={:?} \
generator_did_root={:?} in_progress_typeck_results.hir_owner={:?} span={:?}",
generator_did,
generator_did_root,
in_progress_typeck_results.as_ref().map(|t| t.hir_owner),
span
?generator_did,
?generator_did_root,
in_progress_typeck_results.hir_owner = ?in_progress_typeck_results.as_ref().map(|t| t.hir_owner),
?span,
);
let generator_body = generator_did
@ -1788,7 +1779,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
if let Some(body) = generator_body {
visitor.visit_body(body);
}
debug!("maybe_note_obligation_cause_for_async_await: awaits = {:?}", visitor.awaits);
debug!(awaits = ?visitor.awaits);
// Look for a type inside the generator interior that matches the target type to get
// a span.
@ -1809,11 +1800,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
let ty_erased = self.tcx.erase_late_bound_regions(ty);
let ty_erased = self.tcx.erase_regions(ty_erased);
let eq = ty_erased == target_ty_erased;
debug!(
"maybe_note_obligation_cause_for_async_await: ty_erased={:?} \
target_ty_erased={:?} eq={:?}",
ty_erased, target_ty_erased, eq
);
debug!(?ty_erased, ?target_ty_erased, ?eq);
eq
};
@ -1888,6 +1875,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
/// Unconditionally adds the diagnostic note described in
/// `maybe_note_obligation_cause_for_async_await`'s documentation comment.
#[instrument(level = "debug", skip_all)]
fn note_obligation_cause_for_async_await(
&self,
err: &mut Diagnostic,
@ -2037,8 +2025,9 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
} else {
// Look at the last interior type to get a span for the `.await`.
debug!(
"note_obligation_cause_for_async_await generator_interior_types: {:#?}",
typeck_results.as_ref().map(|t| &t.generator_interior_types)
generator_interior_types = ?format_args!(
"{:#?}", typeck_results.as_ref().map(|t| &t.generator_interior_types)
),
);
explain_yield(interior_span, yield_span, scope_span);
}
@ -2073,7 +2062,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
// bar(Foo(std::ptr::null())).await;
// ^^^^^^^^^^^^^^^^^^^^^ raw-ptr `*T` created inside this struct ctor.
// ```
debug!("parent_def_kind: {:?}", self.tcx.def_kind(parent_did));
debug!(parent_def_kind = ?self.tcx.def_kind(parent_did));
let is_raw_borrow_inside_fn_like_call =
match self.tcx.def_kind(parent_did) {
DefKind::Fn | DefKind::Ctor(..) => target_ty.is_unsafe_ptr(),
@ -2131,7 +2120,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
// Add a note for the item obligation that remains - normally a note pointing to the
// bound that introduced the obligation (e.g. `T: Send`).
debug!("note_obligation_cause_for_async_await: next_code={:?}", next_code);
debug!(?next_code);
self.note_obligation_cause_code(
err,
&obligation.predicate,
@ -2688,6 +2677,9 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
));
}
#[instrument(
level = "debug", skip(self, err), fields(trait_pred.self_ty = ?trait_pred.self_ty())
)]
fn suggest_await_before_try(
&self,
err: &mut Diagnostic,
@ -2695,13 +2687,6 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
trait_pred: ty::PolyTraitPredicate<'tcx>,
span: Span,
) {
debug!(
"suggest_await_before_try: obligation={:?}, span={:?}, trait_pred={:?}, trait_pred_self_ty={:?}",
obligation,
span,
trait_pred,
trait_pred.self_ty()
);
let body_hir_id = obligation.cause.body_id;
let item_id = self.tcx.hir().get_parent_node(body_hir_id);
@ -2739,14 +2724,13 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
);
debug!(
"suggest_await_before_try: normalized_projection_type {:?}",
self.resolve_vars_if_possible(projection_ty)
normalized_projection_type = ?self.resolve_vars_if_possible(projection_ty)
);
let try_obligation = self.mk_trait_obligation_with_new_self_ty(
obligation.param_env,
trait_pred.map_bound(|trait_pred| (trait_pred, projection_ty.skip_binder())),
);
debug!("suggest_await_before_try: try_trait_obligation {:?}", try_obligation);
debug!(try_trait_obligation = ?try_obligation);
if self.predicate_may_hold(&try_obligation)
&& let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span)
&& snippet.ends_with('?')
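
The `#[instrument(..., fields(trait_pred.self_ty = ?trait_pred.self_ty()))]` form used above records a computed value on the span while `skip`ping the bulky argument itself. A small sketch of the same attribute shape, again assuming the `tracing` crates rather than rustc's internal setup:

```rust
use tracing::instrument;

// `skip(input)` keeps the large argument out of the span; `fields(...)`
// records just the derived value we care about, under a dotted name.
#[instrument(level = "debug", skip(input), fields(input.len = input.len()))]
fn process(input: &[u8]) -> usize {
    input.iter().filter(|&&b| b == 0).count()
}

fn main() {
    tracing_subscriber::fmt().with_max_level(tracing::Level::DEBUG).init();
    let _zeros = process(&[0, 1, 0]);
}
```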