
Use if-let guards in the codebase

Léo Lanteri Thauvin 2021-08-16 17:29:49 +02:00
parent a992a11913
commit fde1b76b4b
27 changed files with 242 additions and 254 deletions
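
The change is mechanical: wherever a match arm immediately wrapped its body in an if let, that if let is hoisted into the arm's guard. This syntax needs the unstable if_let_guard feature, which is why each touched crate gains a #![feature(if_let_guard)] gate, plus #![cfg_attr(bootstrap, allow(incomplete_features))] where the bootstrap compiler still flags the feature as incomplete. Below is a minimal, self-contained sketch of the pattern for a nightly toolchain; the describe function is illustrative and not taken from this commit.

// Illustrative sketch only: needs a nightly compiler because `if_let_guard`
// is unstable; `allow(incomplete_features)` silences the lint on toolchains
// that still mark the feature as incomplete.
#![feature(if_let_guard)]
#![allow(incomplete_features)]

fn describe(input: Option<&str>) -> String {
    match input {
        // Before this commit the same logic was written as a nested
        // `if let` inside the arm body; the guard form flattens it and
        // still makes the binding (`n`) available to the arm body.
        Some(text) if let Ok(n) = text.parse::<i32>() => format!("number {}", n),
        Some(text) => format!("text {:?}", text),
        None => "nothing".to_string(),
    }
}

fn main() {
    assert_eq!(describe(Some("42")), "number 42");
    assert_eq!(describe(Some("hi")), "text \"hi\"");
    assert_eq!(describe(None), "nothing");
}

A side effect visible in several hunks: because a guard can fail, each rewritten arm needs a sibling arm for the fall-through case (for example the added ty::Dynamic(..) => "trait object".into() and StmtKind::Local(_) arms).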

View file

@@ -564,11 +564,11 @@ impl NestedMetaItem {
         I: Iterator<Item = TokenTree>,
     {
         match tokens.peek() {
-            Some(TokenTree::Token(token)) => {
-                if let Ok(lit) = Lit::from_token(token) {
-                    tokens.next();
-                    return Some(NestedMetaItem::Literal(lit));
-                }
-            }
+            Some(TokenTree::Token(token))
+                if let Ok(lit) = Lit::from_token(token) =>
+            {
+                tokens.next();
+                return Some(NestedMetaItem::Literal(lit));
+            }
             Some(TokenTree::Delimited(_, token::NoDelim, inner_tokens)) => {
                 let inner_tokens = inner_tokens.clone();

View file

@@ -11,10 +11,12 @@
 #![feature(box_patterns)]
 #![cfg_attr(bootstrap, feature(const_fn_transmute))]
 #![feature(crate_visibility_modifier)]
+#![feature(if_let_guard)]
 #![feature(iter_zip)]
 #![feature(label_break_value)]
 #![feature(nll)]
 #![feature(min_specialization)]
+#![cfg_attr(bootstrap, allow(incomplete_features))] // if_let_guard
 #![recursion_limit = "256"]

 #[macro_use]

View file

@@ -5,9 +5,11 @@
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![feature(crate_visibility_modifier)]
 #![feature(backtrace)]
+#![feature(if_let_guard)]
 #![feature(format_args_capture)]
 #![feature(iter_zip)]
 #![feature(nll)]
+#![cfg_attr(bootstrap, allow(incomplete_features))] // if_let_guard

 #[macro_use]
 extern crate rustc_macros;
@@ -1027,15 +1029,15 @@ impl HandlerInner {
         let mut error_codes = self
             .emitted_diagnostic_codes
             .iter()
-            .filter_map(|x| match &x {
-                DiagnosticId::Error(s) => {
-                    if let Ok(Some(_explanation)) = registry.try_find_description(s) {
-                        Some(s.clone())
-                    } else {
-                        None
-                    }
-                }
-                _ => None,
+            .filter_map(|x| {
+                match &x {
+                    DiagnosticId::Error(s)
+                        if let Ok(Some(_explanation)) = registry.try_find_description(s) =>
+                    {
+                        Some(s.clone())
+                    }
+                    _ => None,
+                }
             })
             .collect::<Vec<_>>();
         if !error_codes.is_empty() {

View file

@@ -305,15 +305,14 @@ impl<'a> StripUnconfigured<'a> {
                     Some((AttrAnnotatedTokenTree::Delimited(sp, delim, inner), *spacing))
                         .into_iter()
                 }
+                AttrAnnotatedTokenTree::Token(ref token) if let TokenKind::Interpolated(ref nt) = token.kind => {
+                    panic!(
+                        "Nonterminal should have been flattened at {:?}: {:?}",
+                        token.span, nt
+                    );
+                }
                 AttrAnnotatedTokenTree::Token(token) => {
-                    if let TokenKind::Interpolated(nt) = token.kind {
-                        panic!(
-                            "Nonterminal should have been flattened at {:?}: {:?}",
-                            token.span, nt
-                        );
-                    } else {
-                        Some((AttrAnnotatedTokenTree::Token(token), *spacing)).into_iter()
-                    }
+                    Some((AttrAnnotatedTokenTree::Token(token), *spacing)).into_iter()
                 }
             })
             .collect();

View file

@@ -2,11 +2,13 @@
 #![feature(decl_macro)]
 #![feature(destructuring_assignment)]
 #![feature(format_args_capture)]
+#![feature(if_let_guard)]
 #![feature(iter_zip)]
 #![feature(proc_macro_diagnostic)]
 #![feature(proc_macro_internals)]
 #![feature(proc_macro_span)]
 #![feature(try_blocks)]
+#![cfg_attr(bootstrap, allow(incomplete_features))] // if_let_guard

 #[macro_use]
 extern crate rustc_macros;

View file

@@ -86,13 +86,12 @@ crate fn mod_dir_path(
     inline: Inline,
 ) -> (PathBuf, DirOwnership) {
     match inline {
+        Inline::Yes if let Some(file_path) = mod_file_path_from_attr(sess, attrs, &module.dir_path) => {
+            // For inline modules file path from `#[path]` is actually the directory path
+            // for historical reasons, so we don't pop the last segment here.
+            (file_path, DirOwnership::Owned { relative: None })
+        }
         Inline::Yes => {
-            if let Some(file_path) = mod_file_path_from_attr(sess, attrs, &module.dir_path) {
-                // For inline modules file path from `#[path]` is actually the directory path
-                // for historical reasons, so we don't pop the last segment here.
-                return (file_path, DirOwnership::Owned { relative: None });
-            }
-
             // We have to push on the current module name in the case of relative
             // paths in order to ensure that any additional module paths from inline
             // `mod x { ... }` come after the relative extension.

View file

@@ -178,18 +178,19 @@ impl FromInternal<(TreeAndSpacing, &'_ mut Vec<Self>, &mut Rustc<'_>)>
                 tt!(Punct::new('#', false))
             }
+            Interpolated(nt)
+                if let Some((name, is_raw)) = ident_name_compatibility_hack(&nt, span, rustc) =>
+            {
+                TokenTree::Ident(Ident::new(rustc.sess, name.name, is_raw, name.span))
+            }
             Interpolated(nt) => {
-                if let Some((name, is_raw)) = ident_name_compatibility_hack(&nt, span, rustc) {
-                    TokenTree::Ident(Ident::new(rustc.sess, name.name, is_raw, name.span))
-                } else {
-                    let stream = nt_to_tokenstream(&nt, rustc.sess, CanSynthesizeMissingTokens::No);
-                    TokenTree::Group(Group {
-                        delimiter: Delimiter::None,
-                        stream,
-                        span: DelimSpan::from_single(span),
-                        flatten: crate::base::pretty_printing_compatibility_hack(&nt, rustc.sess),
-                    })
-                }
+                let stream = nt_to_tokenstream(&nt, rustc.sess, CanSynthesizeMissingTokens::No);
+                TokenTree::Group(Group {
+                    delimiter: Delimiter::None,
+                    stream,
+                    span: DelimSpan::from_single(span),
+                    flatten: crate::base::pretty_printing_compatibility_hack(&nt, rustc.sess),
+                })
             }
             OpenDelim(..) | CloseDelim(..) => unreachable!(),

View file

@@ -31,6 +31,7 @@
 #![feature(box_patterns)]
 #![feature(core_intrinsics)]
 #![feature(discriminant_kind)]
+#![feature(if_let_guard)]
 #![feature(never_type)]
 #![feature(extern_types)]
 #![feature(new_uninit)]
@@ -52,6 +53,7 @@
 #![feature(try_reserve)]
 #![feature(try_reserve_kind)]
 #![feature(nonzero_ops)]
+#![cfg_attr(bootstrap, allow(incomplete_features))] // if_let_guard
 #![recursion_limit = "512"]

 #[macro_use]

View file

@@ -279,13 +279,10 @@ impl<'tcx> ty::TyS<'tcx> {
             }
             ty::FnDef(..) => "fn item".into(),
             ty::FnPtr(_) => "fn pointer".into(),
-            ty::Dynamic(ref inner, ..) => {
-                if let Some(principal) = inner.principal() {
-                    format!("trait object `dyn {}`", tcx.def_path_str(principal.def_id())).into()
-                } else {
-                    "trait object".into()
-                }
+            ty::Dynamic(ref inner, ..) if let Some(principal) = inner.principal() => {
+                format!("trait object `dyn {}`", tcx.def_path_str(principal.def_id())).into()
             }
+            ty::Dynamic(..) => "trait object".into(),
             ty::Closure(..) => "closure".into(),
             ty::Generator(def_id, ..) => tcx.generator_kind(def_id).unwrap().descr().into(),
             ty::GeneratorWitness(..) => "generator witness".into(),
@@ -365,20 +362,19 @@ impl<'tcx> TyCtxt<'tcx> {
                 // Issue #63167
                 db.note("distinct uses of `impl Trait` result in different opaque types");
             }
-            (ty::Float(_), ty::Infer(ty::IntVar(_))) => {
-                if let Ok(
-                    // Issue #53280
-                    snippet,
-                ) = self.sess.source_map().span_to_snippet(sp)
-                {
-                    if snippet.chars().all(|c| c.is_digit(10) || c == '-' || c == '_') {
-                        db.span_suggestion(
-                            sp,
-                            "use a float literal",
-                            format!("{}.0", snippet),
-                            MachineApplicable,
-                        );
-                    }
-                }
-            }
+            (ty::Float(_), ty::Infer(ty::IntVar(_)))
+                if let Ok(
+                    // Issue #53280
+                    snippet,
+                ) = self.sess.source_map().span_to_snippet(sp) =>
+            {
+                if snippet.chars().all(|c| c.is_digit(10) || c == '-' || c == '_') {
+                    db.span_suggestion(
+                        sp,
+                        "use a float literal",
+                        format!("{}.0", snippet),
+                        MachineApplicable,
+                    );
+                }
+            }
             (ty::Param(expected), ty::Param(found)) => {

View file

@@ -225,14 +225,12 @@ impl<'tcx> TyCtxt<'tcx> {
                     }
                 }
-                ty::Tuple(tys) => {
-                    if let Some((&last_ty, _)) = tys.split_last() {
-                        ty = last_ty.expect_ty();
-                    } else {
-                        break;
-                    }
+                ty::Tuple(tys) if let Some((&last_ty, _)) = tys.split_last() => {
+                    ty = last_ty.expect_ty();
                 }
+                ty::Tuple(_) => break,
                 ty::Projection(_) | ty::Opaque(..) => {
                     let normalized = normalize(ty);
                     if ty == normalized {

View file

@@ -2,8 +2,10 @@
 #![feature(array_windows)]
 #![feature(crate_visibility_modifier)]
+#![feature(if_let_guard)]
 #![cfg_attr(bootstrap, feature(bindings_after_at))]
 #![feature(box_patterns)]
+#![cfg_attr(bootstrap, allow(incomplete_features))] // if_let_guard
 #![recursion_limit = "256"]

 use rustc_ast as ast;
@@ -262,20 +264,17 @@ pub fn nt_to_tokenstream(
     let tokens = match *nt {
         Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()),
         Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.as_ref()),
-        Nonterminal::NtStmt(ref stmt) => {
-            if let ast::StmtKind::Empty = stmt.kind {
-                let tokens = AttrAnnotatedTokenStream::new(vec![(
-                    tokenstream::AttrAnnotatedTokenTree::Token(Token::new(
-                        TokenKind::Semi,
-                        stmt.span,
-                    )),
-                    Spacing::Alone,
-                )]);
-                prepend_attrs(&stmt.attrs(), Some(&LazyTokenStream::new(tokens)))
-            } else {
-                prepend_attrs(&stmt.attrs(), stmt.tokens())
-            }
+        Nonterminal::NtStmt(ref stmt) if let ast::StmtKind::Empty = stmt.kind => {
+            let tokens = AttrAnnotatedTokenStream::new(vec![(
+                tokenstream::AttrAnnotatedTokenTree::Token(Token::new(
+                    TokenKind::Semi,
+                    stmt.span,
+                )),
+                Spacing::Alone,
+            )]);
+            prepend_attrs(&stmt.attrs(), Some(&LazyTokenStream::new(tokens)))
         }
+        Nonterminal::NtStmt(ref stmt) => prepend_attrs(&stmt.attrs(), stmt.tokens()),
         Nonterminal::NtPat(ref pat) => convert_tokens(pat.tokens.as_ref()),
         Nonterminal::NtTy(ref ty) => convert_tokens(ty.tokens.as_ref()),
         Nonterminal::NtIdent(ident, is_raw) => {

View file

@@ -143,15 +143,16 @@ impl<'a> Parser<'a> {
                 token::NtTy(self.collect_tokens_no_attrs(|this| this.parse_ty())?)
             }
             // this could be handled like a token, since it is one
+            NonterminalKind::Ident
+                if let Some((ident, is_raw)) = get_macro_ident(&self.token) =>
+            {
+                self.bump();
+                token::NtIdent(ident, is_raw)
+            }
             NonterminalKind::Ident => {
-                if let Some((ident, is_raw)) = get_macro_ident(&self.token) {
-                    self.bump();
-                    token::NtIdent(ident, is_raw)
-                } else {
-                    let token_str = pprust::token_to_string(&self.token);
-                    let msg = &format!("expected ident, found {}", &token_str);
-                    return Err(self.struct_span_err(self.token.span, msg));
-                }
+                let token_str = pprust::token_to_string(&self.token);
+                let msg = &format!("expected ident, found {}", &token_str);
+                return Err(self.struct_span_err(self.token.span, msg));
             }
             NonterminalKind::Path => token::NtPath(
                 self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?,

View file

@@ -493,21 +493,19 @@ impl<'a> Parser<'a> {
                 }
             }
             StmtKind::Expr(_) | StmtKind::MacCall(_) => {}
-            StmtKind::Local(ref mut local) => {
-                if let Err(e) = self.expect_semi() {
-                    // We might be at the `,` in `let x = foo<bar, baz>;`. Try to recover.
-                    match &mut local.init {
-                        Some(ref mut expr) => {
-                            self.check_mistyped_turbofish_with_multiple_type_params(e, expr)?;
-                            // We found `foo<bar, baz>`, have we fully recovered?
-                            self.expect_semi()?;
-                        }
-                        None => return Err(e),
-                    }
-                }
+            StmtKind::Local(ref mut local) if let Err(e) = self.expect_semi() => {
+                // We might be at the `,` in `let x = foo<bar, baz>;`. Try to recover.
+                match &mut local.init {
+                    Some(ref mut expr) => {
+                        self.check_mistyped_turbofish_with_multiple_type_params(e, expr)?;
+                        // We found `foo<bar, baz>`, have we fully recovered?
+                        self.expect_semi()?;
+                    }
+                    None => return Err(e),
+                }
                 eat_semi = false;
             }
-            StmtKind::Empty | StmtKind::Item(_) | StmtKind::Semi(_) => eat_semi = false,
+            StmtKind::Empty | StmtKind::Item(_) | StmtKind::Local(_) | StmtKind::Semi(_) => eat_semi = false,
         }
         if eat_semi && self.eat(&token::Semi) {

View file

@@ -24,16 +24,15 @@ pub fn check_meta(sess: &ParseSess, attr: &Attribute) {
         Some((name, _, template, _)) if name != sym::rustc_dummy => {
             check_builtin_attribute(sess, attr, name, template)
         }
-        _ => {
-            if let MacArgs::Eq(..) = attr.get_normal_item().args {
-                // All key-value attributes are restricted to meta-item syntax.
-                parse_meta(sess, attr)
-                    .map_err(|mut err| {
-                        err.emit();
-                    })
-                    .ok();
-            }
+        _ if let MacArgs::Eq(..) = attr.get_normal_item().args => {
+            // All key-value attributes are restricted to meta-item syntax.
+            parse_meta(sess, attr)
+                .map_err(|mut err| {
+                    err.emit();
+                })
+                .ok();
         }
+        _ => {}
     }
 }

View file

@@ -1,5 +1,7 @@
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
+#![feature(if_let_guard)]
 #![feature(nll)]
+#![cfg_attr(bootstrap, allow(incomplete_features))] // if_let_guard
 #![recursion_limit = "256"]

 mod dump_visitor;
@@ -326,54 +328,53 @@ impl<'tcx> SaveContext<'tcx> {
                     attributes: lower_attributes(attrs.to_vec(), self),
                 }))
             }
-            hir::ItemKind::Impl(hir::Impl { ref of_trait, ref self_ty, ref items, .. }) => {
-                if let hir::TyKind::Path(hir::QPath::Resolved(_, ref path)) = self_ty.kind {
-                    // Common case impl for a struct or something basic.
-                    if generated_code(path.span) {
-                        return None;
-                    }
-                    let sub_span = path.segments.last().unwrap().ident.span;
-                    filter!(self.span_utils, sub_span);
-                    let impl_id = self.next_impl_id();
-                    let span = self.span_from_span(sub_span);
-                    let type_data = self.lookup_def_id(self_ty.hir_id);
-                    type_data.map(|type_data| {
-                        Data::RelationData(
-                            Relation {
-                                kind: RelationKind::Impl { id: impl_id },
-                                span: span.clone(),
-                                from: id_from_def_id(type_data),
-                                to: of_trait
-                                    .as_ref()
-                                    .and_then(|t| self.lookup_def_id(t.hir_ref_id))
-                                    .map(id_from_def_id)
-                                    .unwrap_or_else(null_id),
-                            },
-                            Impl {
-                                id: impl_id,
-                                kind: match *of_trait {
-                                    Some(_) => ImplKind::Direct,
-                                    None => ImplKind::Inherent,
-                                },
-                                span,
-                                value: String::new(),
-                                parent: None,
-                                children: items
-                                    .iter()
-                                    .map(|i| id_from_def_id(i.id.def_id.to_def_id()))
-                                    .collect(),
-                                docs: String::new(),
-                                sig: None,
-                                attributes: vec![],
-                            },
-                        )
-                    })
-                } else {
-                    None
-                }
+            hir::ItemKind::Impl(hir::Impl { ref of_trait, ref self_ty, ref items, .. })
+                if let hir::TyKind::Path(hir::QPath::Resolved(_, ref path)) = self_ty.kind =>
+            {
+                // Common case impl for a struct or something basic.
+                if generated_code(path.span) {
+                    return None;
+                }
+                let sub_span = path.segments.last().unwrap().ident.span;
+                filter!(self.span_utils, sub_span);
+                let impl_id = self.next_impl_id();
+                let span = self.span_from_span(sub_span);
+                let type_data = self.lookup_def_id(self_ty.hir_id);
+                type_data.map(|type_data| {
+                    Data::RelationData(
+                        Relation {
+                            kind: RelationKind::Impl { id: impl_id },
+                            span: span.clone(),
+                            from: id_from_def_id(type_data),
+                            to: of_trait
+                                .as_ref()
+                                .and_then(|t| self.lookup_def_id(t.hir_ref_id))
+                                .map(id_from_def_id)
+                                .unwrap_or_else(null_id),
+                        },
+                        Impl {
+                            id: impl_id,
+                            kind: match *of_trait {
+                                Some(_) => ImplKind::Direct,
+                                None => ImplKind::Inherent,
+                            },
+                            span,
+                            value: String::new(),
+                            parent: None,
+                            children: items
+                                .iter()
+                                .map(|i| id_from_def_id(i.id.def_id.to_def_id()))
+                                .collect(),
+                            docs: String::new(),
+                            sig: None,
+                            attributes: vec![],
+                        },
+                    )
+                })
             }
+            hir::ItemKind::Impl(_) => None,
             _ => {
                 // FIXME
                 bug!();

View file

@@ -16,10 +16,12 @@
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![feature(array_windows)]
 #![feature(crate_visibility_modifier)]
+#![feature(if_let_guard)]
 #![feature(negative_impls)]
 #![feature(nll)]
 #![feature(min_specialization)]
 #![feature(thread_local_const_init)]
+#![cfg_attr(bootstrap, allow(incomplete_features))] // if_let_guard

 #[macro_use]
 extern crate rustc_macros;

View file

@@ -982,15 +982,13 @@ impl SourceMap {
         None
     }

     pub fn ensure_source_file_source_present(&self, source_file: Lrc<SourceFile>) -> bool {
-        source_file.add_external_src(|| match source_file.name {
-            FileName::Real(ref name) => {
-                if let Some(local_path) = name.local_path() {
-                    self.file_loader.read_file(local_path).ok()
-                } else {
-                    None
-                }
-            }
-            _ => None,
+        source_file.add_external_src(|| {
+            match source_file.name {
+                FileName::Real(ref name) if let Some(local_path) = name.local_path() => {
+                    self.file_loader.read_file(local_path).ok()
+                }
+                _ => None,
+            }
         })
     }
@@ -1033,22 +1031,19 @@
     fn map_filename_prefix(&self, file: &FileName) -> (FileName, bool) {
         match file {
-            FileName::Real(realfile) => {
-                if let RealFileName::LocalPath(local_path) = realfile {
-                    let (mapped_path, mapped) = self.map_prefix(local_path.to_path_buf());
-                    let realfile = if mapped {
-                        RealFileName::Remapped {
-                            local_path: Some(local_path.clone()),
-                            virtual_name: mapped_path,
-                        }
-                    } else {
-                        realfile.clone()
-                    };
-                    (FileName::Real(realfile), mapped)
-                } else {
-                    unreachable!("attempted to remap an already remapped filename");
-                }
+            FileName::Real(realfile) if let RealFileName::LocalPath(local_path) = realfile => {
+                let (mapped_path, mapped) = self.map_prefix(local_path.to_path_buf());
+                let realfile = if mapped {
+                    RealFileName::Remapped {
+                        local_path: Some(local_path.clone()),
+                        virtual_name: mapped_path,
+                    }
+                } else {
+                    realfile.clone()
+                };
+                (FileName::Real(realfile), mapped)
             }
+            FileName::Real(_) => unreachable!("attempted to remap an already remapped filename"),
             other => (other.clone(), false),
         }
     }

View file

@@ -2380,12 +2380,10 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
                 if let Some(i) = (param.index as usize).checked_sub(generics.parent_count) {
                     // Our own parameters are the resolved lifetimes.
                     match param.kind {
-                        GenericParamDefKind::Lifetime => {
-                            if let hir::GenericArg::Lifetime(lifetime) = &lifetimes[i] {
-                                self.ast_region_to_region(lifetime, None).into()
-                            } else {
-                                bug!()
-                            }
+                        GenericParamDefKind::Lifetime
+                            if let hir::GenericArg::Lifetime(lifetime) = &lifetimes[i] =>
+                        {
+                            self.ast_region_to_region(lifetime, None).into()
                         }
                         _ => bug!(),
                     }

View file

@@ -1178,12 +1178,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         let mut user_self_ty = None;
         let mut is_alias_variant_ctor = false;
         match res {
-            Res::Def(DefKind::Ctor(CtorOf::Variant, _), _) => {
-                if let Some(self_ty) = self_ty {
-                    let adt_def = self_ty.ty_adt_def().unwrap();
-                    user_self_ty = Some(UserSelfTy { impl_def_id: adt_def.did, self_ty });
-                    is_alias_variant_ctor = true;
-                }
+            Res::Def(DefKind::Ctor(CtorOf::Variant, _), _)
+                if let Some(self_ty) = self_ty =>
+            {
+                let adt_def = self_ty.ty_adt_def().unwrap();
+                user_self_ty = Some(UserSelfTy { impl_def_id: adt_def.did, self_ty });
+                is_alias_variant_ctor = true;
             }
             Res::Def(DefKind::AssocFn | DefKind::AssocConst, def_id) => {
                 let container = tcx.associated_item(def_id).container;

View file

@@ -616,32 +616,30 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
         let lang_items = self.tcx.lang_items();
         match *self_ty.value.value.kind() {
-            ty::Dynamic(ref data, ..) => {
-                if let Some(p) = data.principal() {
-                    // Subtle: we can't use `instantiate_query_response` here: using it will
-                    // commit to all of the type equalities assumed by inference going through
-                    // autoderef (see the `method-probe-no-guessing` test).
-                    //
-                    // However, in this code, it is OK if we end up with an object type that is
-                    // "more general" than the object type that we are evaluating. For *every*
-                    // object type `MY_OBJECT`, a function call that goes through a trait-ref
-                    // of the form `<MY_OBJECT as SuperTraitOf(MY_OBJECT)>::func` is a valid
-                    // `ObjectCandidate`, and it should be discoverable "exactly" through one
-                    // of the iterations in the autoderef loop, so there is no problem with it
-                    // being discoverable in another one of these iterations.
-                    //
-                    // Using `instantiate_canonical_with_fresh_inference_vars` on our
-                    // `Canonical<QueryResponse<Ty<'tcx>>>` and then *throwing away* the
-                    // `CanonicalVarValues` will exactly give us such a generalization - it
-                    // will still match the original object type, but it won't pollute our
-                    // type variables in any form, so just do that!
-                    let (QueryResponse { value: generalized_self_ty, .. }, _ignored_var_values) =
-                        self.fcx
-                            .instantiate_canonical_with_fresh_inference_vars(self.span, &self_ty);
-                    self.assemble_inherent_candidates_from_object(generalized_self_ty);
-                    self.assemble_inherent_impl_candidates_for_type(p.def_id());
-                }
+            ty::Dynamic(ref data, ..) if let Some(p) = data.principal() => {
+                // Subtle: we can't use `instantiate_query_response` here: using it will
+                // commit to all of the type equalities assumed by inference going through
+                // autoderef (see the `method-probe-no-guessing` test).
+                //
+                // However, in this code, it is OK if we end up with an object type that is
+                // "more general" than the object type that we are evaluating. For *every*
+                // object type `MY_OBJECT`, a function call that goes through a trait-ref
+                // of the form `<MY_OBJECT as SuperTraitOf(MY_OBJECT)>::func` is a valid
+                // `ObjectCandidate`, and it should be discoverable "exactly" through one
+                // of the iterations in the autoderef loop, so there is no problem with it
+                // being discoverable in another one of these iterations.
+                //
+                // Using `instantiate_canonical_with_fresh_inference_vars` on our
+                // `Canonical<QueryResponse<Ty<'tcx>>>` and then *throwing away* the
+                // `CanonicalVarValues` will exactly give us such a generalization - it
+                // will still match the original object type, but it won't pollute our
+                // type variables in any form, so just do that!
+                let (QueryResponse { value: generalized_self_ty, .. }, _ignored_var_values) =
+                    self.fcx
+                        .instantiate_canonical_with_fresh_inference_vars(self.span, &self_ty);
+                self.assemble_inherent_candidates_from_object(generalized_self_ty);
+                self.assemble_inherent_impl_candidates_for_type(p.def_id());
             }
             ty::Adt(def, _) => {
                 self.assemble_inherent_impl_candidates_for_type(def.did);

View file

@@ -627,15 +627,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         let binding_parent = tcx.hir().get(binding_parent_id);
         debug!("inner {:?} pat {:?} parent {:?}", inner, pat, binding_parent);
         match binding_parent {
-            hir::Node::Param(hir::Param { span, .. }) => {
-                if let Ok(snippet) = tcx.sess.source_map().span_to_snippet(inner.span) {
-                    err.span_suggestion(
-                        *span,
-                        &format!("did you mean `{}`", snippet),
-                        format!(" &{}", expected),
-                        Applicability::MachineApplicable,
-                    );
-                }
+            hir::Node::Param(hir::Param { span, .. })
+                if let Ok(snippet) = tcx.sess.source_map().span_to_snippet(inner.span) =>
+            {
+                err.span_suggestion(
+                    *span,
+                    &format!("did you mean `{}`", snippet),
+                    format!(" &{}", expected),
+                    Applicability::MachineApplicable,
+                );
             }
             hir::Node::Arm(_) | hir::Node::Pat(_) => {
                 // rely on match ergonomics or it might be nested `&&pat`
@@ -1293,13 +1293,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             (Some(mut err), None) => {
                 err.emit();
             }
-            (None, None) => {
-                if let Some(mut err) =
-                    self.error_tuple_variant_index_shorthand(variant, pat, fields)
-                {
-                    err.emit();
-                }
+            (None, None) if let Some(mut err) =
+                self.error_tuple_variant_index_shorthand(variant, pat, fields) =>
+            {
+                err.emit();
             }
+            (None, None) => {}
         }
         no_field_errors
     }

View file

@@ -1049,26 +1049,24 @@ fn check_opaque_types<'fcx, 'tcx>(
                 let arg_is_param = match arg.unpack() {
                     GenericArgKind::Type(ty) => matches!(ty.kind(), ty::Param(_)),
-                    GenericArgKind::Lifetime(region) => {
-                        if let ty::ReStatic = region {
-                            tcx.sess
-                                .struct_span_err(
-                                    span,
-                                    "non-defining opaque type use in defining scope",
-                                )
-                                .span_label(
-                                    tcx.def_span(generics.param_at(i, tcx).def_id),
-                                    "cannot use static lifetime; use a bound lifetime \
-                                     instead or remove the lifetime parameter from the \
-                                     opaque type",
-                                )
-                                .emit();
-                            continue;
-                        }
-                        true
+                    GenericArgKind::Lifetime(region) if let ty::ReStatic = region => {
+                        tcx.sess
+                            .struct_span_err(
+                                span,
+                                "non-defining opaque type use in defining scope",
+                            )
+                            .span_label(
+                                tcx.def_span(generics.param_at(i, tcx).def_id),
+                                "cannot use static lifetime; use a bound lifetime \
+                                 instead or remove the lifetime parameter from the \
+                                 opaque type",
+                            )
+                            .emit();
+                        continue;
                     }
+                    GenericArgKind::Lifetime(_) => true,
                     GenericArgKind::Const(ct) => matches!(ct.val, ty::ConstKind::Param(_)),
                 };

View file

@@ -175,10 +175,10 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
                     }
                 }
             }
-            hir::ExprKind::AssignOp(..) => {
-                if let Some(a) = typeck_results.adjustments_mut().get_mut(lhs.hir_id) {
-                    a.pop();
-                }
+            hir::ExprKind::AssignOp(..)
+                if let Some(a) = typeck_results.adjustments_mut().get_mut(lhs.hir_id) =>
+            {
+                a.pop();
             }
             _ => {}
         }

View file

@@ -446,13 +446,13 @@ pub(super) fn type_of(tcx: TyCtxt<'_>, def_id: DefId) -> Ty<'_> {
             }
         }
-        Node::AnonConst(_) => {
-            if let Some(param) = tcx.opt_const_param_of(def_id) {
-                // We defer to `type_of` of the corresponding parameter
-                // for generic arguments.
-                return tcx.type_of(param);
-            }
+        Node::AnonConst(_) if let Some(param) = tcx.opt_const_param_of(def_id) => {
+            // We defer to `type_of` of the corresponding parameter
+            // for generic arguments.
+            tcx.type_of(param)
+        }

+        Node::AnonConst(_) => {
             let parent_node = tcx.hir().get(tcx.hir().get_parent_node(hir_id));
             match parent_node {
                 Node::Ty(&Ty { kind: TyKind::Array(_, ref constant), .. })

View file

@@ -60,6 +60,7 @@ This API is completely unstable and subject to change.
 #![feature(bool_to_option)]
 #![feature(crate_visibility_modifier)]
 #![feature(format_args_capture)]
+#![feature(if_let_guard)]
 #![feature(in_band_lifetimes)]
 #![feature(is_sorted)]
 #![feature(iter_zip)]
@@ -68,6 +69,7 @@ This API is completely unstable and subject to change.
 #![feature(never_type)]
 #![feature(slice_partition_dedup)]
 #![feature(control_flow_enum)]
+#![cfg_attr(bootstrap, allow(incomplete_features))] // if_let_guard
 #![recursion_limit = "256"]

 #[macro_use]

View file

@@ -69,6 +69,7 @@
 #![warn(missing_debug_implementations)]
 #![warn(missing_docs)]
 #![allow(explicit_outlives_requirements)]
+#![cfg_attr(bootstrap, allow(incomplete_features))] // if_let_guard
 //
 // Library features for const fns:
 #![feature(const_align_of_val)]
@@ -134,6 +135,7 @@
 #![feature(exhaustive_patterns)]
 #![feature(extern_types)]
 #![feature(fundamental)]
+#![feature(if_let_guard)]
 #![feature(intra_doc_pointers)]
 #![feature(intrinsics)]
 #![feature(lang_items)]

View file

@@ -236,13 +236,8 @@ pub fn dec2flt<F: RawFloat>(s: &str) -> Result<F, ParseFloatError> {
     let num = match parse_number(s, negative) {
         Some(r) => r,
-        None => {
-            if let Some(value) = parse_inf_nan(s, negative) {
-                return Ok(value);
-            } else {
-                return Err(pfe_invalid());
-            }
-        }
+        None if let Some(value) = parse_inf_nan(s, negative) => return Ok(value),
+        None => return Err(pfe_invalid()),
     };
     if let Some(value) = num.try_fast_path::<F>() {
         return Ok(value);