Auto merge of #63124 - Centril:rollup-onohtqt, r=Centril
Rollup of 12 pull requests

Successful merges:

- #61965 (Remove mentions of removed `offset_to` method from `align_offset` docs)
- #62928 (Syntax: Recover on `for ( $pat in $expr ) $block`)
- #63000 (Impl Debug for Chars)
- #63083 (Make generic parameters always use modern hygiene)
- #63087 (Add very simple edition check to tidy.)
- #63093 (Properly check the defining scope of existential types)
- #63096 (Add tests for some `existential_type` ICEs)
- #63099 (vxworks: Remove Linux-specific comments.)
- #63106 (ci: Skip installing SWIG/xz on OSX)
- #63108 (Add links to None in Option doc)
- #63109 (std: Fix a failing `fs` test on Windows)
- #63111 (Add syntactic and semantic tests for rest patterns, i.e. `..`)

Failed merges:

r? @ghost
Commit: 4eeaaa722d
44 changed files with 1042 additions and 247 deletions
@@ -62,17 +62,6 @@ steps:
   - template: install-sccache.yml
   - template: install-clang.yml
 
-  # Install some dependencies needed to build LLDB/Clang, currently only needed
-  # during the `dist` target
-  - bash: |
-      set -e
-      brew update
-      brew install xz
-      brew install swig@3
-      brew link --force swig@3
-    displayName: Install build dependencies (OSX)
-    condition: and(succeeded(), eq(variables['Agent.OS'], 'Darwin'), eq(variables['SCRIPT'],'./x.py dist'))
-
   # Switch to XCode 9.3 on OSX since it seems to be the last version that supports
   # i686-apple-darwin. We'll eventually want to upgrade this and it will probably
   # force us to drop i686-apple-darwin, but let's keep the wheels turning for now.
@@ -1108,6 +1108,16 @@ fn test_iterator_last() {
     assert_eq!(it.last(), Some('m'));
 }
 
+#[test]
+fn test_chars_debug() {
+    let s = "ศไทย中华Việt Nam";
+    let c = s.chars();
+    assert_eq!(
+        format!("{:?}", c),
+        r#"Chars(['ศ', 'ไ', 'ท', 'ย', '中', '华', 'V', 'i', 'ệ', 't', ' ', 'N', 'a', 'm'])"#
+    );
+}
+
 #[test]
 fn test_bytesator() {
     let s = "ศไทย中华Việt Nam";
@@ -8,7 +8,7 @@
 //! * Initial values
 //! * Return values for functions that are not defined
 //!   over their entire input range (partial functions)
-//! * Return value for otherwise reporting simple errors, where `None` is
+//! * Return value for otherwise reporting simple errors, where [`None`] is
 //!   returned on error
 //! * Optional struct fields
 //! * Struct fields that can be loaned or "taken"
@@ -752,7 +752,7 @@ impl<T> Option<T> {
         }
     }
 
-    /// Returns [`Some`] if exactly one of `self`, `optb` is [`Some`], otherwise returns `None`.
+    /// Returns [`Some`] if exactly one of `self`, `optb` is [`Some`], otherwise returns [`None`].
     ///
     /// [`Some`]: #variant.Some
    /// [`None`]: #variant.None
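As a quick illustration of the `xor` behaviour described in the doc comment above (this example is written for this note and is not part of the commit):

```rust
fn main() {
    // Exactly one operand is `Some`, so that value is returned.
    assert_eq!(Some(2).xor(None::<u32>), Some(2));
    assert_eq!(None.xor(Some(3)), Some(3));
    // Both or neither operand is `Some`, so the result is `None`.
    assert_eq!(Some(2).xor(Some(3)), None);
    assert_eq!(None::<u32>.xor(None), None);
}
```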
@@ -1609,7 +1609,7 @@ impl<T: ?Sized> *const T {
     /// `usize::max_value()`.
     ///
     /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
-    /// used with the `offset` or `offset_to` methods.
+    /// used with the `add` method.
     ///
     /// There are no guarantees whatsover that offsetting the pointer will not overflow or go
     /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
@@ -2410,7 +2410,7 @@ impl<T: ?Sized> *mut T {
     /// `usize::max_value()`.
     ///
     /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
-    /// used with the `offset` or `offset_to` methods.
+    /// used with the `add` method.
     ///
     /// There are no guarantees whatsover that offsetting the pointer will not overflow or go
     /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
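A minimal sketch of the documented `align_offset` usage (illustration only, not part of the commit; the alignment value 4 and the `u16` buffer are arbitrary choices):

```rust
fn main() {
    let xs = [0u16; 8];
    let ptr = xs.as_ptr();
    // The returned offset is counted in elements of `T` (here `u16`), not bytes.
    let off = ptr.align_offset(4);
    if off != usize::max_value() {
        // Per the updated docs, the offset is meant to be used with `add`.
        let aligned = unsafe { ptr.add(off) };
        assert_eq!(aligned as usize % 4, 0);
    }
}
```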
@@ -464,7 +464,7 @@ Section: Iterators
 ///
 /// [`chars`]: ../../std/primitive.str.html#method.chars
 /// [`str`]: ../../std/primitive.str.html
-#[derive(Clone, Debug)]
+#[derive(Clone)]
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct Chars<'a> {
     iter: slice::Iter<'a, u8>
@@ -600,6 +600,16 @@ impl<'a> Iterator for Chars<'a> {
     }
 }
 
+#[stable(feature = "chars_debug_impl", since = "1.38.0")]
+impl fmt::Debug for Chars<'_> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "Chars(")?;
+        f.debug_list().entries(self.clone()).finish()?;
+        write!(f, ")")?;
+        Ok(())
+    }
+}
+
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<'a> DoubleEndedIterator for Chars<'a> {
     #[inline]
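A small sketch of the output produced by the `Debug` impl added above, on toolchains that include this change; the format follows the new test earlier in this diff:

```rust
fn main() {
    let chars = "abc".chars();
    // With the manual impl this prints: Chars(['a', 'b', 'c'])
    println!("{:?}", chars);
}
```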
@@ -1189,11 +1189,7 @@ pub fn may_define_existential_type(
     opaque_hir_id: hir::HirId,
 ) -> bool {
     let mut hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
-    trace!(
-        "may_define_existential_type(def={:?}, opaque_node={:?})",
-        tcx.hir().get(hir_id),
-        tcx.hir().get(opaque_hir_id)
-    );
 
     // Named existential types can be defined by any siblings or children of siblings.
     let scope = tcx.hir().get_defining_scope(opaque_hir_id).expect("could not get defining scope");
@@ -1202,5 +1198,12 @@ pub fn may_define_existential_type(
         hir_id = tcx.hir().get_parent_item(hir_id);
     }
     // Syntactically, we are allowed to define the concrete type if:
-    hir_id == scope
+    let res = hir_id == scope;
+    trace!(
+        "may_define_existential_type(def={:?}, opaque_node={:?}) = {}",
+        tcx.hir().get(hir_id),
+        tcx.hir().get(opaque_hir_id),
+        res
+    );
+    res
 }
@@ -2568,7 +2568,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
         let lifetimes: Vec<_> = params
             .iter()
             .filter_map(|param| match param.kind {
-                GenericParamKind::Lifetime { .. } => Some((param, param.name)),
+                GenericParamKind::Lifetime { .. } => Some((param, param.name.modern())),
                 _ => None,
             })
             .collect();
@@ -869,8 +869,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Resolver<'a> {
         debug!("(resolving function) entering function");
         let rib_kind = match function_kind {
             FnKind::ItemFn(..) => FnItemRibKind,
-            FnKind::Method(..) => AssocItemRibKind,
-            FnKind::Closure(_) => NormalRibKind,
+            FnKind::Method(..) | FnKind::Closure(_) => NormalRibKind,
         };
 
         // Create a value rib for the function.
@@ -2307,21 +2306,32 @@ impl<'a> Resolver<'a> {
         if ident.name == kw::Invalid {
             return Some(LexicalScopeBinding::Res(Res::Err));
         }
-        ident.span = if ident.name == kw::SelfUpper {
+        let (general_span, modern_span) = if ident.name == kw::SelfUpper {
             // FIXME(jseyfried) improve `Self` hygiene
-            ident.span.with_ctxt(SyntaxContext::empty())
+            let empty_span = ident.span.with_ctxt(SyntaxContext::empty());
+            (empty_span, empty_span)
         } else if ns == TypeNS {
-            ident.span.modern()
+            let modern_span = ident.span.modern();
+            (modern_span, modern_span)
         } else {
-            ident.span.modern_and_legacy()
+            (ident.span.modern_and_legacy(), ident.span.modern())
         };
+        ident.span = general_span;
+        let modern_ident = Ident { span: modern_span, ..ident };
 
         // Walk backwards up the ribs in scope.
         let record_used = record_used_id.is_some();
         let mut module = self.graph_root;
         for i in (0 .. self.ribs[ns].len()).rev() {
             debug!("walk rib\n{:?}", self.ribs[ns][i].bindings);
-            if let Some(res) = self.ribs[ns][i].bindings.get(&ident).cloned() {
+            // Use the rib kind to determine whether we are resolving parameters
+            // (modern hygiene) or local variables (legacy hygiene).
+            let rib_ident = if let AssocItemRibKind | ItemRibKind = self.ribs[ns][i].kind {
+                modern_ident
+            } else {
+                ident
+            };
+            if let Some(res) = self.ribs[ns][i].bindings.get(&rib_ident).cloned() {
                 // The ident resolves to a type parameter or local variable.
                 return Some(LexicalScopeBinding::Res(
                     self.validate_res_from_ribs(ns, i, res, record_used, path_span),
@@ -2357,7 +2367,7 @@ impl<'a> Resolver<'a> {
             }
         }
 
-        ident.span = ident.span.modern();
+        ident = modern_ident;
         let mut poisoned = None;
         loop {
             let opt_module = if let Some(node_id) = record_used_id {
@@ -1664,6 +1664,7 @@ fn find_existential_constraints(tcx: TyCtxt<'_>, def_id: DefId) -> Ty<'_> {
             intravisit::NestedVisitorMap::All(&self.tcx.hir())
         }
         fn visit_item(&mut self, it: &'tcx Item) {
+            debug!("find_existential_constraints: visiting {:?}", it);
             let def_id = self.tcx.hir().local_def_id(it.hir_id);
             // The existential type itself or its children are not within its reveal scope.
             if def_id != self.def_id {
@@ -1672,6 +1673,7 @@ fn find_existential_constraints(tcx: TyCtxt<'_>, def_id: DefId) -> Ty<'_> {
             }
         }
         fn visit_impl_item(&mut self, it: &'tcx ImplItem) {
+            debug!("find_existential_constraints: visiting {:?}", it);
             let def_id = self.tcx.hir().local_def_id(it.hir_id);
             // The existential type itself or its children are not within its reveal scope.
             if def_id != self.def_id {
@@ -1680,6 +1682,7 @@ fn find_existential_constraints(tcx: TyCtxt<'_>, def_id: DefId) -> Ty<'_> {
             }
         }
         fn visit_trait_item(&mut self, it: &'tcx TraitItem) {
+            debug!("find_existential_constraints: visiting {:?}", it);
             let def_id = self.tcx.hir().local_def_id(it.hir_id);
             self.check(def_id);
             intravisit::walk_trait_item(self, it);
@@ -1703,9 +1706,23 @@ fn find_existential_constraints(tcx: TyCtxt<'_>, def_id: DefId) -> Ty<'_> {
     } else {
         debug!("find_existential_constraints: scope={:?}", tcx.hir().get(scope));
         match tcx.hir().get(scope) {
-            Node::Item(ref it) => intravisit::walk_item(&mut locator, it),
-            Node::ImplItem(ref it) => intravisit::walk_impl_item(&mut locator, it),
-            Node::TraitItem(ref it) => intravisit::walk_trait_item(&mut locator, it),
+            // We explicitly call `visit_*` methods, instead of using `intravisit::walk_*` methods
+            // This allows our visitor to process the defining item itself, causing
+            // it to pick up any 'sibling' defining uses.
+            //
+            // For example, this code:
+            // ```
+            // fn foo() {
+            //     existential type Blah: Debug;
+            //     let my_closure = || -> Blah { true };
+            // }
+            // ```
+            //
+            // requires us to explicitly process `foo()` in order
+            // to notice the defining usage of `Blah`.
+            Node::Item(ref it) => locator.visit_item(it),
+            Node::ImplItem(ref it) => locator.visit_impl_item(it),
+            Node::TraitItem(ref it) => locator.visit_trait_item(it),
             other => bug!(
                 "{:?} is not a valid scope for an existential type item",
                 other
@@ -3316,11 +3316,11 @@ mod tests {
         fs::create_dir_all(&d).unwrap();
         File::create(&f).unwrap();
         if cfg!(not(windows)) {
-            symlink_dir("../d/e", &c).unwrap();
+            symlink_file("../d/e", &c).unwrap();
             symlink_file("../f", &e).unwrap();
         }
         if cfg!(windows) {
-            symlink_dir(r"..\d\e", &c).unwrap();
+            symlink_file(r"..\d\e", &c).unwrap();
             symlink_file(r"..\f", &e).unwrap();
         }
 
@@ -287,22 +287,7 @@ impl File {
         let fd = cvt_r(|| unsafe {
             open(path.as_ptr(), flags, opts.mode as c_int)
         })?;
-        let fd = FileDesc::new(fd);
-        // Currently the standard library supports Linux 2.6.18 which did not
-        // have the O_CLOEXEC flag (passed above). If we're running on an older
-        // Linux kernel then the flag is just ignored by the OS. After we open
-        // the first file, we check whether it has CLOEXEC set. If it doesn't,
-        // we will explicitly ask for a CLOEXEC fd for every further file we
-        // open, if it does, we will skip that step.
-        //
-        // The CLOEXEC flag, however, is supported on versions of macOS/BSD/etc
-        // that we support, so we only do this on Linux currently.
-        fn ensure_cloexec(_: &FileDesc) -> io::Result<()> {
-            Ok(())
-        }
-
-        ensure_cloexec(&fd)?;
-        Ok(File(fd))
+        Ok(File(FileDesc::new(fd)))
     }
 
     pub fn file_attr(&self) -> io::Result<FileAttr> {
@@ -141,10 +141,6 @@ impl Socket {
 
     pub fn accept(&self, storage: *mut sockaddr, len: *mut socklen_t)
                   -> io::Result<Socket> {
-        // Unfortunately the only known way right now to accept a socket and
-        // atomically set the CLOEXEC flag is to use the `accept4` syscall on
-        // Linux. This was added in 2.6.28, however, and because we support
-        // 2.6.18 we must detect this support dynamically.
         let fd = cvt_r(|| unsafe {
             libc::accept(self.0.raw(), storage, len)
         })?;
@@ -11,11 +11,6 @@ pub fn anon_pipe() -> io::Result<(AnonPipe, AnonPipe)> {
     static INVALID: AtomicBool = AtomicBool::new(false);
 
     let mut fds = [0; 2];
-
-    // Unfortunately the only known way right now to create atomically set the
-    // CLOEXEC flag is to use the `pipe2` syscall on Linux. This was added in
-    // 2.6.27, however, and because we support 2.6.18 we must detect this
-    // support dynamically.
     cvt(unsafe { libc::pipe(fds.as_mut_ptr()) })?;
 
     let fd0 = FileDesc::new(fds[0]);
@@ -14,7 +14,7 @@ use crate::ThinVec;
 use crate::util::parser::AssocOp;
 use errors::{Applicability, DiagnosticBuilder, DiagnosticId};
 use rustc_data_structures::fx::FxHashSet;
-use syntax_pos::{Span, DUMMY_SP, MultiSpan};
+use syntax_pos::{Span, DUMMY_SP, MultiSpan, SpanSnippetError};
 use log::{debug, trace};
 use std::mem;
 
@@ -199,6 +199,10 @@ impl<'a> Parser<'a> {
         &self.sess.span_diagnostic
     }
 
+    crate fn span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError> {
+        self.sess.source_map().span_to_snippet(span)
+    }
+
     crate fn expected_ident_found(&self) -> DiagnosticBuilder<'a> {
         let mut err = self.struct_span_err(
             self.token.span,
@@ -549,8 +553,10 @@ impl<'a> Parser<'a> {
             ExprKind::Binary(op, _, _) if op.node.is_comparison() => {
                 // respan to include both operators
                 let op_span = op.span.to(self.token.span);
-                let mut err = self.diagnostic().struct_span_err(op_span,
-                    "chained comparison operators require parentheses");
+                let mut err = self.struct_span_err(
+                    op_span,
+                    "chained comparison operators require parentheses",
+                );
                 if op.node == BinOpKind::Lt &&
                     *outer_op == AssocOp::Less ||  // Include `<` to provide this recommendation
                     *outer_op == AssocOp::Greater  // even in a case like the following:
@@ -717,8 +723,6 @@ impl<'a> Parser<'a> {
         path.span = ty_span.to(self.prev_span);
 
         let ty_str = self
-            .sess
-            .source_map()
             .span_to_snippet(ty_span)
             .unwrap_or_else(|_| pprust::ty_to_string(&ty));
         self.diagnostic()
@@ -889,7 +893,7 @@ impl<'a> Parser<'a> {
             err.span_label(await_sp, "while parsing this incorrect await expression");
             err
         })?;
-        let expr_str = self.sess.source_map().span_to_snippet(expr.span)
+        let expr_str = self.span_to_snippet(expr.span)
             .unwrap_or_else(|_| pprust::expr_to_string(&expr));
        let suggestion = format!("{}.await{}", expr_str, if is_question { "?" } else { "" });
         let sp = lo.to(expr.span);
@@ -923,6 +927,48 @@ impl<'a> Parser<'a> {
         }
     }
 
+    /// Recover a situation like `for ( $pat in $expr )`
+    /// and suggest writing `for $pat in $expr` instead.
+    ///
+    /// This should be called before parsing the `$block`.
+    crate fn recover_parens_around_for_head(
+        &mut self,
+        pat: P<Pat>,
+        expr: &Expr,
+        begin_paren: Option<Span>,
+    ) -> P<Pat> {
+        match (&self.token.kind, begin_paren) {
+            (token::CloseDelim(token::Paren), Some(begin_par_sp)) => {
+                self.bump();
+
+                let pat_str = self
+                    // Remove the `(` from the span of the pattern:
+                    .span_to_snippet(pat.span.trim_start(begin_par_sp).unwrap())
+                    .unwrap_or_else(|_| pprust::pat_to_string(&pat));
+
+                self.struct_span_err(self.prev_span, "unexpected closing `)`")
+                    .span_label(begin_par_sp, "opening `(`")
+                    .span_suggestion(
+                        begin_par_sp.to(self.prev_span),
+                        "remove parenthesis in `for` loop",
+                        format!("{} in {}", pat_str, pprust::expr_to_string(&expr)),
+                        // With e.g. `for (x) in y)` this would replace `(x) in y)`
+                        // with `x) in y)` which is syntactically invalid.
+                        // However, this is prevented before we get here.
+                        Applicability::MachineApplicable,
+                    )
+                    .emit();
+
+                // Unwrap `(pat)` into `pat` to avoid the `unused_parens` lint.
+                pat.and_then(|pat| match pat.node {
+                    PatKind::Paren(pat) => pat,
+                    _ => P(pat),
+                })
+            }
+            _ => pat,
+        }
+    }
+
     crate fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool {
         self.token.is_ident() &&
         if let ast::ExprKind::Path(..) = node { true } else { false } &&
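To show what this recovery (together with the `parse_for_expr` change further down) targets, here is a hedged sketch of the loop shape involved; the error and suggestion text come from the hunk above, while the concrete loop body is invented for illustration:

```rust
fn main() {
    // With this change, `for ( x in 0..3 ) { ... }` no longer fails with an
    // opaque parse error; the parser reports "unexpected closing `)`" and
    // offers a machine-applicable suggestion to drop the parentheses,
    // i.e. the form written below.
    for x in 0..3 {
        println!("{}", x);
    }
}
```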
@@ -1105,17 +1151,14 @@ impl<'a> Parser<'a> {
     crate fn check_for_for_in_in_typo(&mut self, in_span: Span) {
         if self.eat_keyword(kw::In) {
             // a common typo: `for _ in in bar {}`
-            let mut err = self.sess.span_diagnostic.struct_span_err(
-                self.prev_span,
-                "expected iterable, found keyword `in`",
-            );
-            err.span_suggestion_short(
-                in_span.until(self.prev_span),
-                "remove the duplicated `in`",
-                String::new(),
-                Applicability::MachineApplicable,
-            );
-            err.emit();
+            self.struct_span_err(self.prev_span, "expected iterable, found keyword `in`")
+                .span_suggestion_short(
+                    in_span.until(self.prev_span),
+                    "remove the duplicated `in`",
+                    String::new(),
+                    Applicability::MachineApplicable,
+                )
+                .emit();
         }
     }
 
@@ -1128,12 +1171,12 @@ impl<'a> Parser<'a> {
 
     crate fn eat_incorrect_doc_comment_for_arg_type(&mut self) {
         if let token::DocComment(_) = self.token.kind {
-            let mut err = self.diagnostic().struct_span_err(
+            self.struct_span_err(
                 self.token.span,
                 "documentation comments cannot be applied to a function parameter's type",
-            );
-            err.span_label(self.token.span, "doc comments are not allowed here");
-            err.emit();
+            )
+            .span_label(self.token.span, "doc comments are not allowed here")
+            .emit();
             self.bump();
         } else if self.token == token::Pound && self.look_ahead(1, |t| {
             *t == token::OpenDelim(token::Bracket)
@@ -1145,12 +1188,12 @@ impl<'a> Parser<'a> {
             }
             let sp = lo.to(self.token.span);
             self.bump();
-            let mut err = self.diagnostic().struct_span_err(
+            self.struct_span_err(
                 sp,
                 "attributes cannot be applied to a function parameter's type",
-            );
-            err.span_label(sp, "attributes are not allowed here");
-            err.emit();
+            )
+            .span_label(sp, "attributes are not allowed here")
+            .emit();
         }
     }
 
@@ -1206,18 +1249,19 @@ impl<'a> Parser<'a> {
             self.expect(&token::Colon)?;
             let ty = self.parse_ty()?;
 
-            let mut err = self.diagnostic().struct_span_err_with_code(
-                pat.span,
-                "patterns aren't allowed in methods without bodies",
-                DiagnosticId::Error("E0642".into()),
-            );
-            err.span_suggestion_short(
-                pat.span,
-                "give this argument a name or use an underscore to ignore it",
-                "_".to_owned(),
-                Applicability::MachineApplicable,
-            );
-            err.emit();
+            self.diagnostic()
+                .struct_span_err_with_code(
+                    pat.span,
+                    "patterns aren't allowed in methods without bodies",
+                    DiagnosticId::Error("E0642".into()),
+                )
+                .span_suggestion_short(
+                    pat.span,
+                    "give this argument a name or use an underscore to ignore it",
+                    "_".to_owned(),
+                    Applicability::MachineApplicable,
+                )
+                .emit();
 
             // Pretend the pattern is `_`, to avoid duplicate errors from AST validation.
             let pat = P(Pat {
@@ -2329,19 +2329,19 @@ impl<'a> Parser<'a> {
             // This is a struct literal, but we don't can't accept them here
             let expr = self.parse_struct_expr(lo, path.clone(), attrs.clone());
             if let (Ok(expr), false) = (&expr, struct_allowed) {
-                let mut err = self.diagnostic().struct_span_err(
+                self.struct_span_err(
                     expr.span,
                     "struct literals are not allowed here",
-                );
-                err.multipart_suggestion(
+                )
+                .multipart_suggestion(
                     "surround the struct literal with parentheses",
                     vec![
                         (lo.shrink_to_lo(), "(".to_string()),
                         (expr.span.shrink_to_hi(), ")".to_string()),
                     ],
                     Applicability::MachineApplicable,
-                );
-                err.emit();
+                )
+                .emit();
             }
             return Some(expr);
         }
@@ -2370,18 +2370,18 @@ impl<'a> Parser<'a> {
                 }
             }
             if self.token == token::Comma {
-                let mut err = self.sess.span_diagnostic.mut_span_err(
+                self.struct_span_err(
                     exp_span.to(self.prev_span),
                     "cannot use a comma after the base struct",
-                );
-                err.span_suggestion_short(
+                )
+                .span_suggestion_short(
                     self.token.span,
                     "remove this comma",
                     String::new(),
                     Applicability::MachineApplicable
-                );
-                err.note("the base struct must always be the last field");
-                err.emit();
+                )
+                .note("the base struct must always be the last field")
+                .emit();
                 self.recover_stmt();
             }
             break;
@@ -2736,15 +2736,14 @@ impl<'a> Parser<'a> {
                 let e = self.parse_prefix_expr(None);
                 let (span, e) = self.interpolated_or_expr_span(e)?;
                 let span_of_tilde = lo;
-                let mut err = self.diagnostic()
-                    .struct_span_err(span_of_tilde, "`~` cannot be used as a unary operator");
-                err.span_suggestion_short(
-                    span_of_tilde,
-                    "use `!` to perform bitwise negation",
-                    "!".to_owned(),
-                    Applicability::MachineApplicable
-                );
-                err.emit();
+                self.struct_span_err(span_of_tilde, "`~` cannot be used as a unary operator")
+                    .span_suggestion_short(
+                        span_of_tilde,
+                        "use `!` to perform bitwise negation",
+                        "!".to_owned(),
+                        Applicability::MachineApplicable
+                    )
+                    .emit();
                 (lo.to(span), self.mk_unary(UnOp::Not, e))
             }
             token::BinOp(token::Minus) => {
@@ -2792,21 +2791,20 @@ impl<'a> Parser<'a> {
                 if cannot_continue_expr {
                     self.bump();
                     // Emit the error ...
-                    let mut err = self.diagnostic()
-                        .struct_span_err(self.token.span,
-                                         &format!("unexpected {} after identifier",
-                                                  self.this_token_descr()));
-                    // span the `not` plus trailing whitespace to avoid
-                    // trailing whitespace after the `!` in our suggestion
-                    let to_replace = self.sess.source_map()
-                        .span_until_non_whitespace(lo.to(self.token.span));
-                    err.span_suggestion_short(
-                        to_replace,
+                    self.struct_span_err(
+                        self.token.span,
+                        &format!("unexpected {} after identifier",self.this_token_descr())
+                    )
+                    .span_suggestion_short(
+                        // Span the `not` plus trailing whitespace to avoid
+                        // trailing whitespace after the `!` in our suggestion
+                        self.sess.source_map()
+                            .span_until_non_whitespace(lo.to(self.token.span)),
                         "use `!` to perform logical negation",
                         "!".to_owned(),
                         Applicability::MachineApplicable
-                    );
-                    err.emit();
+                    )
+                    .emit();
                     // —and recover! (just as if we were in the block
                     // for the `token::Not` arm)
                     let e = self.parse_prefix_expr(None);
@@ -2884,7 +2882,7 @@ impl<'a> Parser<'a> {
             // We've found an expression that would be parsed as a statement, but the next
             // token implies this should be parsed as an expression.
             // For example: `if let Some(x) = x { x } else { 0 } / 2`
-            let mut err = self.sess.span_diagnostic.struct_span_err(self.token.span, &format!(
+            let mut err = self.struct_span_err(self.token.span, &format!(
                 "expected expression, found `{}`",
                 pprust::token_to_string(&self.token),
             ));
@@ -3072,28 +3070,29 @@ impl<'a> Parser<'a> {
                 // in AST and continue parsing.
                 let msg = format!("`<` is interpreted as a start of generic \
                                    arguments for `{}`, not a {}", path, op_noun);
-                let mut err =
-                    self.sess.span_diagnostic.struct_span_err(self.token.span, &msg);
                 let span_after_type = parser_snapshot_after_type.token.span;
-                err.span_label(self.look_ahead(1, |t| t.span).to(span_after_type),
-                               "interpreted as generic arguments");
-                err.span_label(self.token.span, format!("not interpreted as {}", op_noun));
-
                 let expr = mk_expr(self, P(Ty {
                     span: path.span,
                     node: TyKind::Path(None, path),
                     id: ast::DUMMY_NODE_ID
                 }));
 
-                let expr_str = self.sess.source_map().span_to_snippet(expr.span)
+                let expr_str = self.span_to_snippet(expr.span)
                     .unwrap_or_else(|_| pprust::expr_to_string(&expr));
-                err.span_suggestion(
-                    expr.span,
-                    &format!("try {} the cast value", op_verb),
-                    format!("({})", expr_str),
-                    Applicability::MachineApplicable
-                );
-                err.emit();
+
+                self.struct_span_err(self.token.span, &msg)
+                    .span_label(
+                        self.look_ahead(1, |t| t.span).to(span_after_type),
+                        "interpreted as generic arguments"
+                    )
+                    .span_label(self.token.span, format!("not interpreted as {}", op_noun))
+                    .span_suggestion(
+                        expr.span,
+                        &format!("try {} the cast value", op_verb),
+                        format!("({})", expr_str),
+                        Applicability::MachineApplicable
+                    )
+                    .emit();
 
                 Ok(expr)
             }
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Parse a 'for' .. 'in' expression ('for' token already eaten)
|
/// Parse a 'for' .. 'in' expression ('for' token already eaten)
|
||||||
fn parse_for_expr(&mut self, opt_label: Option<Label>,
|
fn parse_for_expr(
|
||||||
span_lo: Span,
|
&mut self,
|
||||||
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
|
opt_label: Option<Label>,
|
||||||
|
span_lo: Span,
|
||||||
|
mut attrs: ThinVec<Attribute>
|
||||||
|
) -> PResult<'a, P<Expr>> {
|
||||||
// Parse: `for <src_pat> in <src_expr> <src_loop_block>`
|
// Parse: `for <src_pat> in <src_expr> <src_loop_block>`
|
||||||
|
|
||||||
|
// Record whether we are about to parse `for (`.
|
||||||
|
// This is used below for recovery in case of `for ( $stuff ) $block`
|
||||||
|
// in which case we will suggest `for $stuff $block`.
|
||||||
|
let begin_paren = match self.token.kind {
|
||||||
|
token::OpenDelim(token::Paren) => Some(self.token.span),
|
||||||
|
_ => None,
|
||||||
|
};
|
||||||
|
|
||||||
let pat = self.parse_top_level_pat()?;
|
let pat = self.parse_top_level_pat()?;
|
||||||
if !self.eat_keyword(kw::In) {
|
if !self.eat_keyword(kw::In) {
|
||||||
let in_span = self.prev_span.between(self.token.span);
|
let in_span = self.prev_span.between(self.token.span);
|
||||||
let mut err = self.sess.span_diagnostic
|
self.struct_span_err(in_span, "missing `in` in `for` loop")
|
||||||
.struct_span_err(in_span, "missing `in` in `for` loop");
|
.span_suggestion_short(
|
||||||
err.span_suggestion_short(
|
in_span,
|
||||||
in_span, "try adding `in` here", " in ".into(),
|
"try adding `in` here", " in ".into(),
|
||||||
// has been misleading, at least in the past (closed Issue #48492)
|
// has been misleading, at least in the past (closed Issue #48492)
|
||||||
Applicability::MaybeIncorrect
|
Applicability::MaybeIncorrect
|
||||||
);
|
)
|
||||||
err.emit();
|
.emit();
|
||||||
}
|
}
|
||||||
let in_span = self.prev_span;
|
let in_span = self.prev_span;
|
||||||
self.check_for_for_in_in_typo(in_span);
|
self.check_for_for_in_in_typo(in_span);
|
||||||
let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
|
let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
|
||||||
|
|
||||||
|
let pat = self.recover_parens_around_for_head(pat, &expr, begin_paren);
|
||||||
|
|
||||||
let (iattrs, loop_block) = self.parse_inner_attrs_and_block()?;
|
let (iattrs, loop_block) = self.parse_inner_attrs_and_block()?;
|
||||||
attrs.extend(iattrs);
|
attrs.extend(iattrs);
|
||||||
|
|
||||||
|
@ -3522,15 +3535,14 @@ impl<'a> Parser<'a> {
|
||||||
pats.push(self.parse_top_level_pat()?);
|
pats.push(self.parse_top_level_pat()?);
|
||||||
|
|
||||||
if self.token == token::OrOr {
|
if self.token == token::OrOr {
|
||||||
let mut err = self.struct_span_err(self.token.span,
|
self.struct_span_err(self.token.span, "unexpected token `||` after pattern")
|
||||||
"unexpected token `||` after pattern");
|
.span_suggestion(
|
||||||
err.span_suggestion(
|
self.token.span,
|
||||||
self.token.span,
|
"use a single `|` to specify multiple patterns",
|
||||||
"use a single `|` to specify multiple patterns",
|
"|".to_owned(),
|
||||||
"|".to_owned(),
|
Applicability::MachineApplicable
|
||||||
Applicability::MachineApplicable
|
)
|
||||||
);
|
.emit();
|
||||||
err.emit();
|
|
||||||
self.bump();
|
self.bump();
|
||||||
} else if self.eat(&token::BinOp(token::Or)) {
|
} else if self.eat(&token::BinOp(token::Or)) {
|
||||||
// This is a No-op. Continue the loop to parse the next
|
// This is a No-op. Continue the loop to parse the next
|
||||||
|
@ -3627,15 +3639,14 @@ impl<'a> Parser<'a> {
|
||||||
|
|
||||||
if self.token == token::DotDotDot { // Issue #46718
|
if self.token == token::DotDotDot { // Issue #46718
|
||||||
// Accept `...` as if it were `..` to avoid further errors
|
// Accept `...` as if it were `..` to avoid further errors
|
||||||
let mut err = self.struct_span_err(self.token.span,
|
self.struct_span_err(self.token.span, "expected field pattern, found `...`")
|
||||||
"expected field pattern, found `...`");
|
.span_suggestion(
|
||||||
err.span_suggestion(
|
self.token.span,
|
||||||
self.token.span,
|
"to omit remaining fields, use one fewer `.`",
|
||||||
"to omit remaining fields, use one fewer `.`",
|
"..".to_owned(),
|
||||||
"..".to_owned(),
|
Applicability::MachineApplicable
|
||||||
Applicability::MachineApplicable
|
)
|
||||||
);
|
.emit();
|
||||||
err.emit();
|
|
||||||
}
|
}
|
||||||
self.bump(); // `..` || `...`
|
self.bump(); // `..` || `...`
|
||||||
|
|
||||||
|
@ -3788,7 +3799,7 @@ impl<'a> Parser<'a> {
|
||||||
let seq_span = pat.span.to(self.prev_span);
|
let seq_span = pat.span.to(self.prev_span);
|
||||||
let mut err = self.struct_span_err(comma_span,
|
let mut err = self.struct_span_err(comma_span,
|
||||||
"unexpected `,` in pattern");
|
"unexpected `,` in pattern");
|
||||||
if let Ok(seq_snippet) = self.sess.source_map().span_to_snippet(seq_span) {
|
if let Ok(seq_snippet) = self.span_to_snippet(seq_span) {
|
||||||
err.span_suggestion(
|
err.span_suggestion(
|
||||||
seq_span,
|
seq_span,
|
||||||
"try adding parentheses to match on a tuple..",
|
"try adding parentheses to match on a tuple..",
|
||||||
|
@ -4137,7 +4148,7 @@ impl<'a> Parser<'a> {
|
||||||
let parser_snapshot_after_type = self.clone();
|
let parser_snapshot_after_type = self.clone();
|
||||||
mem::replace(self, parser_snapshot_before_type);
|
mem::replace(self, parser_snapshot_before_type);
|
||||||
|
|
||||||
let snippet = self.sess.source_map().span_to_snippet(pat.span).unwrap();
|
let snippet = self.span_to_snippet(pat.span).unwrap();
|
||||||
err.span_label(pat.span, format!("while parsing the type for `{}`", snippet));
|
err.span_label(pat.span, format!("while parsing the type for `{}`", snippet));
|
||||||
(Some((parser_snapshot_after_type, colon_sp, err)), None)
|
(Some((parser_snapshot_after_type, colon_sp, err)), None)
|
||||||
}
|
}
|
||||||
|
@ -4557,7 +4568,7 @@ impl<'a> Parser<'a> {
|
||||||
if self.eat(&token::Semi) {
|
if self.eat(&token::Semi) {
|
||||||
stmt_span = stmt_span.with_hi(self.prev_span.hi());
|
stmt_span = stmt_span.with_hi(self.prev_span.hi());
|
||||||
}
|
}
|
||||||
if let Ok(snippet) = self.sess.source_map().span_to_snippet(stmt_span) {
|
if let Ok(snippet) = self.span_to_snippet(stmt_span) {
|
||||||
e.span_suggestion(
|
e.span_suggestion(
|
||||||
stmt_span,
|
stmt_span,
|
||||||
"try placing this code inside a block",
|
"try placing this code inside a block",
|
||||||
|
@ -4730,7 +4741,7 @@ impl<'a> Parser<'a> {
|
||||||
lo.to(self.prev_span),
|
lo.to(self.prev_span),
|
||||||
"parenthesized lifetime bounds are not supported"
|
"parenthesized lifetime bounds are not supported"
|
||||||
);
|
);
|
||||||
if let Ok(snippet) = self.sess.source_map().span_to_snippet(inner_span) {
|
if let Ok(snippet) = self.span_to_snippet(inner_span) {
|
||||||
err.span_suggestion_short(
|
err.span_suggestion_short(
|
||||||
lo.to(self.prev_span),
|
lo.to(self.prev_span),
|
||||||
"remove the parentheses",
|
"remove the parentheses",
|
||||||
|
@ -4788,7 +4799,7 @@ impl<'a> Parser<'a> {
|
||||||
let mut new_bound_list = String::new();
|
let mut new_bound_list = String::new();
|
||||||
if !bounds.is_empty() {
|
if !bounds.is_empty() {
|
||||||
let mut snippets = bounds.iter().map(|bound| bound.span())
|
let mut snippets = bounds.iter().map(|bound| bound.span())
|
||||||
.map(|span| self.sess.source_map().span_to_snippet(span));
|
.map(|span| self.span_to_snippet(span));
|
||||||
while let Some(Ok(snippet)) = snippets.next() {
|
while let Some(Ok(snippet)) = snippets.next() {
|
||||||
new_bound_list.push_str(" + ");
|
new_bound_list.push_str(" + ");
|
||||||
new_bound_list.push_str(&snippet);
|
new_bound_list.push_str(&snippet);
|
||||||
|
@ -5853,15 +5864,16 @@ impl<'a> Parser<'a> {
|
||||||
if let token::DocComment(_) = self.token.kind {
|
if let token::DocComment(_) = self.token.kind {
|
||||||
if self.look_ahead(1,
|
if self.look_ahead(1,
|
||||||
|tok| tok == &token::CloseDelim(token::Brace)) {
|
|tok| tok == &token::CloseDelim(token::Brace)) {
|
||||||
let mut err = self.diagnostic().struct_span_err_with_code(
|
self.diagnostic().struct_span_err_with_code(
|
||||||
self.token.span,
|
self.token.span,
|
||||||
"found a documentation comment that doesn't document anything",
|
"found a documentation comment that doesn't document anything",
|
||||||
DiagnosticId::Error("E0584".into()),
|
DiagnosticId::Error("E0584".into()),
|
||||||
);
|
)
|
||||||
err.help("doc comments must come before what they document, maybe a \
|
.help(
|
||||||
|
"doc comments must come before what they document, maybe a \
|
||||||
comment was intended with `//`?",
|
comment was intended with `//`?",
|
||||||
);
|
)
|
||||||
err.emit();
|
.emit();
|
||||||
self.bump();
|
self.bump();
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
@ -6305,12 +6317,15 @@ impl<'a> Parser<'a> {
|
||||||
let sp = path.span;
|
let sp = path.span;
|
||||||
let help_msg = format!("make this visible only to module `{}` with `in`", path);
|
let help_msg = format!("make this visible only to module `{}` with `in`", path);
|
||||||
self.expect(&token::CloseDelim(token::Paren))?; // `)`
|
self.expect(&token::CloseDelim(token::Paren))?; // `)`
|
||||||
let mut err = struct_span_err!(self.sess.span_diagnostic, sp, E0704, "{}", msg);
|
struct_span_err!(self.sess.span_diagnostic, sp, E0704, "{}", msg)
|
||||||
err.help(suggestion);
|
.help(suggestion)
|
||||||
err.span_suggestion(
|
.span_suggestion(
|
||||||
sp, &help_msg, format!("in {}", path), Applicability::MachineApplicable
|
sp,
|
||||||
);
|
&help_msg,
|
||||||
err.emit(); // emit diagnostic, but continue with public visibility
|
format!("in {}", path),
|
||||||
|
Applicability::MachineApplicable,
|
||||||
|
)
|
||||||
|
.emit(); // emit diagnostic, but continue with public visibility
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -6744,14 +6759,10 @@ impl<'a> Parser<'a> {
             }
             ident = Ident::from_str(&fixed_name).with_span_pos(fixed_name_sp);
 
-            let mut err = self.struct_span_err(fixed_name_sp, error_msg);
-            err.span_label(fixed_name_sp, "dash-separated idents are not valid");
-            err.multipart_suggestion(
-                suggestion_msg,
-                replacement,
-                Applicability::MachineApplicable,
-            );
-            err.emit();
+            self.struct_span_err(fixed_name_sp, error_msg)
+                .span_label(fixed_name_sp, "dash-separated idents are not valid")
+                .multipart_suggestion(suggestion_msg, replacement, Applicability::MachineApplicable)
+                .emit();
         }
         Ok(ident)
     }
@@ -6906,14 +6917,14 @@ impl<'a> Parser<'a> {
             if !self.eat(&token::Comma) {
                 if self.token.is_ident() && !self.token.is_reserved_ident() {
                     let sp = self.sess.source_map().next_point(self.prev_span);
-                    let mut err = self.struct_span_err(sp, "missing comma");
-                    err.span_suggestion_short(
-                        sp,
-                        "missing comma",
-                        ",".to_owned(),
-                        Applicability::MaybeIncorrect,
-                    );
-                    err.emit();
+                    self.struct_span_err(sp, "missing comma")
+                        .span_suggestion_short(
+                            sp,
+                            "missing comma",
+                            ",".to_owned(),
+                            Applicability::MaybeIncorrect,
+                        )
+                        .emit();
                 } else {
                     break;
                 }
@@ -6952,15 +6963,16 @@ impl<'a> Parser<'a> {
             Some(abi) => Ok(Some(abi)),
             None => {
                 let prev_span = self.prev_span;
-                let mut err = struct_span_err!(
+                struct_span_err!(
                     self.sess.span_diagnostic,
                     prev_span,
                     E0703,
                     "invalid ABI: found `{}`",
-                    symbol);
-                err.span_label(prev_span, "invalid ABI");
-                err.help(&format!("valid ABIs: {}", abi::all_names().join(", ")));
-                err.emit();
+                    symbol
+                )
+                .span_label(prev_span, "invalid ABI")
+                .help(&format!("valid ABIs: {}", abi::all_names().join(", ")))
+                .emit();
                 Ok(None)
             }
         }
@@ -7130,16 +7142,15 @@ impl<'a> Parser<'a> {
         // CONST ITEM
         if self.eat_keyword(kw::Mut) {
             let prev_span = self.prev_span;
-            let mut err = self.diagnostic()
-                .struct_span_err(prev_span, "const globals cannot be mutable");
-            err.span_label(prev_span, "cannot be mutable");
-            err.span_suggestion(
-                const_span,
-                "you might want to declare a static instead",
-                "static".to_owned(),
-                Applicability::MaybeIncorrect,
-            );
-            err.emit();
+            self.struct_span_err(prev_span, "const globals cannot be mutable")
+                .span_label(prev_span, "cannot be mutable")
+                .span_suggestion(
+                    const_span,
+                    "you might want to declare a static instead",
+                    "static".to_owned(),
+                    Applicability::MaybeIncorrect,
+                )
+                .emit();
         }
         let (ident, item_, extra_attrs) = self.parse_item_const(None)?;
         let prev_span = self.prev_span;
@@ -7407,7 +7418,7 @@ impl<'a> Parser<'a> {
                 sp, &suggestion, format!(" {} ", kw), Applicability::MachineApplicable
             );
         } else {
            if let Ok(snippet) = self.span_to_snippet(ident_sp) {
                 err.span_suggestion(
                     full_sp,
                     "if you meant to call a macro, try",
@@ -2,7 +2,7 @@
 name = "example"
 version = "0.1.0"
 authors = ["Hideki Sekine <sekineh@me.com>"]
-# edition = "2018"
+edition = "2018"
 
 [dependencies]
 cortex-m = "0.5.4"
@@ -1,16 +1,14 @@
 // #![feature(stdsimd)]
 #![no_main]
 #![no_std]
 
-extern crate cortex_m;
-
-extern crate cortex_m_rt as rt;
-extern crate cortex_m_semihosting as semihosting;
-extern crate panic_halt;
-
 use core::fmt::Write;
 use cortex_m::asm;
-use rt::entry;
+use cortex_m_rt::entry;
+use cortex_m_semihosting as semihosting;
+
+//FIXME: This imports the provided #[panic_handler].
+#[allow(rust_2018_idioms)]
+extern crate panic_halt;
 
 entry!(main);
 
@@ -22,7 +20,7 @@ fn main() -> ! {
 
     // write something through semihosting interface
     let mut hstdout = semihosting::hio::hstdout().unwrap();
-    write!(hstdout, "x = {}\n", x);
+    let _ = write!(hstdout, "x = {}\n", x);
 
     // exit from qemu
     semihosting::debug::exit(semihosting::debug::EXIT_SUCCESS);
issue-52843-closure-constrain.rs (new file)
@@ -0,0 +1,12 @@
+// Checks to ensure that we properly detect when a closure constrains an existential type
+#![feature(existential_type)]
+
+use std::fmt::Debug;
+
+fn main() {
+    existential type Existential: Debug;
+    fn _unused() -> Existential { String::new() }
+    //~^ ERROR: concrete type differs from previous defining existential type use
+    let null = || -> Existential { 0 };
+    println!("{:?}", null());
+}

issue-52843-closure-constrain.stderr (new file)
@@ -0,0 +1,20 @@
+error: concrete type differs from previous defining existential type use
+  --> $DIR/issue-52843-closure-constrain.rs:8:5
+   |
+LL |     fn _unused() -> Existential { String::new() }
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `i32`, got `std::string::String`
+   |
+note: previous use here
+  --> $DIR/issue-52843-closure-constrain.rs:6:1
+   |
+LL | / fn main() {
+LL | |     existential type Existential: Debug;
+LL | |     fn _unused() -> Existential { String::new() }
+LL | |
+LL | |     let null = || -> Existential { 0 };
+LL | |     println!("{:?}", null());
+LL | | }
+   | |_^
+
+error: aborting due to previous error
+
@ -0,0 +1,19 @@
|
||||||
|
// check-pass
|
||||||
|
|
||||||
|
#![feature(const_fn, generators, generator_trait, existential_type)]
|
||||||
|
|
||||||
|
use std::ops::Generator;
|
||||||
|
|
||||||
|
existential type GenOnce<Y, R>: Generator<Yield = Y, Return = R>;
|
||||||
|
|
||||||
|
const fn const_generator<Y, R>(yielding: Y, returning: R) -> GenOnce<Y, R> {
|
||||||
|
move || {
|
||||||
|
yield yielding;
|
||||||
|
|
||||||
|
return returning;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const FOO: GenOnce<usize, usize> = const_generator(10, 100);
|
||||||
|
|
||||||
|
fn main() {}
|
15
src/test/ui/existential_types/issue-60407.rs
Normal file
15
src/test/ui/existential_types/issue-60407.rs
Normal file
|
@ -0,0 +1,15 @@
|
||||||
|
// check-pass
|
||||||
|
|
||||||
|
#![feature(existential_type)]
|
||||||
|
|
||||||
|
existential type Debuggable: core::fmt::Debug;
|
||||||
|
|
||||||
|
static mut TEST: Option<Debuggable> = None;
|
||||||
|
|
||||||
|
fn main() {
|
||||||
|
unsafe { TEST = Some(foo()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
fn foo() -> Debuggable {
|
||||||
|
0u32
|
||||||
|
}
|
26
src/test/ui/existential_types/issue-60564.rs
Normal file
26
src/test/ui/existential_types/issue-60564.rs
Normal file
|
@ -0,0 +1,26 @@
|
||||||
|
#![feature(existential_type)]
|
||||||
|
|
||||||
|
trait IterBits {
|
||||||
|
type BitsIter: Iterator<Item = u8>;
|
||||||
|
fn iter_bits(self, n: u8) -> Self::BitsIter;
|
||||||
|
}
|
||||||
|
|
||||||
|
existential type IterBitsIter<T, E, I>: std::iter::Iterator<Item = I>;
|
||||||
|
//~^ ERROR could not find defining uses
|
||||||
|
|
||||||
|
impl<T, E> IterBits for T
|
||||||
|
where
|
||||||
|
T: std::ops::Shr<Output = T>
|
||||||
|
+ std::ops::BitAnd<T, Output = T>
|
||||||
|
+ std::convert::From<u8>
|
||||||
|
+ std::convert::TryInto<u8, Error = E>,
|
||||||
|
E: std::fmt::Debug,
|
||||||
|
{
|
||||||
|
type BitsIter = IterBitsIter<T, E, u8>;
|
||||||
|
fn iter_bits(self, n: u8) -> Self::BitsIter {
|
||||||
|
//~^ ERROR type parameter `E` is part of concrete type but not used
|
||||||
|
(0u8..n)
|
||||||
|
.rev()
|
||||||
|
.map(move |shift| ((self >> T::from(shift)) & T::from(1)).try_into().unwrap())
|
||||||
|
}
|
||||||
|
}
|
25 src/test/ui/existential_types/issue-60564.stderr Normal file

@@ -0,0 +1,25 @@
error[E0601]: `main` function not found in crate `issue_60564`
   |
   = note: consider adding a `main` function to `$DIR/issue-60564.rs`

error: type parameter `E` is part of concrete type but not used in parameter list for existential type
  --> $DIR/issue-60564.rs:20:49
   |
LL |       fn iter_bits(self, n: u8) -> Self::BitsIter {
   |  _________________________________________________^
LL | |
LL | |         (0u8..n)
LL | |             .rev()
LL | |             .map(move |shift| ((self >> T::from(shift)) & T::from(1)).try_into().unwrap())
LL | |     }
   | |_____^

error: could not find defining uses
  --> $DIR/issue-60564.rs:8:1
   |
LL | existential type IterBitsIter<T, E, I>: std::iter::Iterator<Item = I>;
   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

error: aborting due to 3 previous errors

For more information about this error, try `rustc --explain E0601`.

19 src/test/ui/hygiene/duplicate_lifetimes.rs Normal file

@@ -0,0 +1,19 @@
// Ensure that lifetime parameter names are modernized before we check for
// duplicates.

#![feature(decl_macro, rustc_attrs)]

#[rustc_macro_transparency = "semitransparent"]
macro m($a:lifetime) {
    fn g<$a, 'a>() {} //~ ERROR lifetime name `'a` declared twice
}

#[rustc_macro_transparency = "transparent"]
macro n($a:lifetime) {
    fn h<$a, 'a>() {} //~ ERROR lifetime name `'a` declared twice
}

m!('a);
n!('a);

fn main() {}

27 src/test/ui/hygiene/duplicate_lifetimes.stderr Normal file

@@ -0,0 +1,27 @@
error[E0263]: lifetime name `'a` declared twice in the same scope
  --> $DIR/duplicate_lifetimes.rs:8:14
   |
LL |     fn g<$a, 'a>() {}
   |              ^^ declared twice
...
LL | m!('a);
   | -------
   | |  |
   | |  previous declaration here
   | in this macro invocation

error[E0263]: lifetime name `'a` declared twice in the same scope
  --> $DIR/duplicate_lifetimes.rs:13:14
   |
LL |     fn h<$a, 'a>() {}
   |              ^^ declared twice
...
LL | n!('a);
   | -------
   | |  |
   | |  previous declaration here
   | in this macro invocation

error: aborting due to 2 previous errors

For more information about this error, try `rustc --explain E0263`.

104 src/test/ui/hygiene/generic_params.rs Normal file

@@ -0,0 +1,104 @@
// Ensure that generic parameters always have modern hygiene.

// check-pass
// ignore-pretty pretty-printing is unhygienic

#![feature(decl_macro, rustc_attrs, const_generics)]

mod type_params {
    macro m($T:ident) {
        fn f<$T: Clone, T: PartialEq>(t1: $T, t2: T) -> ($T, bool) {
            (t1.clone(), t2 == t2)
        }
    }

    #[rustc_macro_transparency = "semitransparent"]
    macro n($T:ident) {
        fn g<$T: Clone>(t1: $T, t2: T) -> (T, $T) {
            (t1.clone(), t2.clone())
        }
        fn h<T: Clone>(t1: $T, t2: T) -> (T, $T) {
            (t1.clone(), t2.clone())
        }
    }

    #[rustc_macro_transparency = "transparent"]
    macro p($T:ident) {
        fn j<$T: Clone>(t1: $T, t2: T) -> (T, $T) {
            (t1.clone(), t2.clone())
        }
        fn k<T: Clone>(t1: $T, t2: T) -> (T, $T) {
            (t1.clone(), t2.clone())
        }
    }

    m!(T);
    n!(T);
    p!(T);
}

mod lifetime_params {
    macro m($a:lifetime) {
        fn f<'b, 'c, $a: 'b, 'a: 'c>(t1: &$a(), t2: &'a ()) -> (&'b (), &'c ()) {
            (t1, t2)
        }
    }

    #[rustc_macro_transparency = "semitransparent"]
    macro n($a:lifetime) {
        fn g<$a>(t1: &$a(), t2: &'a ()) -> (&'a (), &$a ()) {
            (t1, t2)
        }
        fn h<'a>(t1: &$a(), t2: &'a ()) -> (&'a (), &$a ()) {
            (t1, t2)
        }
    }

    #[rustc_macro_transparency = "transparent"]
    macro p($a:lifetime) {
        fn j<$a>(t1: &$a(), t2: &'a ()) -> (&'a (), &$a ()) {
            (t1, t2)
        }
        fn k<'a>(t1: &$a(), t2: &'a ()) -> (&'a (), &$a ()) {
            (t1, t2)
        }
    }

    m!('a);
    n!('a);
    p!('a);
}

mod const_params {
    macro m($C:ident) {
        fn f<const $C: usize, const C: usize>(t1: [(); $C], t2: [(); C]) -> ([(); $C], [(); C]) {
            (t1, t2)
        }
    }

    #[rustc_macro_transparency = "semitransparent"]
    macro n($C:ident) {
        fn g<const $C: usize>(t1: [(); $C], t2: [(); C]) -> ([(); C], [(); $C]) {
            (t1, t2)
        }
        fn h<const C: usize>(t1: [(); $C], t2: [(); C]) -> ([(); C], [(); $C]) {
            (t1, t2)
        }
    }

    #[rustc_macro_transparency = "transparent"]
    macro p($C:ident) {
        fn j<const $C: usize>(t1: [(); $C], t2: [(); C]) -> ([(); C], [(); $C]) {
            (t1, t2)
        }
        fn k<const C: usize>(t1: [(); $C], t2: [(); C]) -> ([(); C], [(); $C]) {
            (t1, t2)
        }
    }

    m!(C);
    n!(C);
    p!(C);
}

fn main() {}

6 src/test/ui/hygiene/generic_params.stderr Normal file

@@ -0,0 +1,6 @@
warning: the feature `const_generics` is incomplete and may cause the compiler to crash
  --> $DIR/generic_params.rs:6:37
   |
LL | #![feature(decl_macro, rustc_attrs, const_generics)]
   |                                     ^^^^^^^^^^^^^^

32 src/test/ui/hygiene/issue-61574-const-parameters.rs Normal file

@@ -0,0 +1,32 @@
// A more comprehensive test that const parameters have correctly implemented
// hygiene

// check-pass

#![feature(const_generics)]

use std::ops::Add;

struct VectorLike<T, const SIZE: usize>([T; {SIZE}]);

macro_rules! impl_operator_overload {
    ($trait_ident:ident, $method_ident:ident) => {

        impl<T, const SIZE: usize> $trait_ident for VectorLike<T, {SIZE}>
        where
            T: $trait_ident,
        {
            type Output = VectorLike<T, {SIZE}>;

            fn $method_ident(self, _: VectorLike<T, {SIZE}>) -> VectorLike<T, {SIZE}> {
                let _ = SIZE;
                unimplemented!()
            }
        }

    }
}

impl_operator_overload!(Add, add);

fn main() {}

6 src/test/ui/hygiene/issue-61574-const-parameters.stderr Normal file

@@ -0,0 +1,6 @@
warning: the feature `const_generics` is incomplete and may cause the compiler to crash
  --> $DIR/issue-61574-const-parameters.rs:6:12
   |
LL | #![feature(const_generics)]
   |            ^^^^^^^^^^^^^^

@@ -1,14 +0,0 @@
// check-pass
// ignore-pretty pretty-printing is unhygienic

#![feature(decl_macro)]

macro m($T:ident) {
    fn f<T, $T>(t: T, t2: $T) -> (T, $T) {
        (t, t2)
    }
}

m!(T);

fn main() {}

15 src/test/ui/parser/recover-for-loop-parens-around-head.rs Normal file

@@ -0,0 +1,15 @@
// Here we test that the parser is able to recover in a situation like
// `for ( $pat in $expr )` since that is familiar syntax in other languages.
// Instead we suggest that the user writes `for $pat in $expr`.

#![deny(unused)] // Make sure we don't trigger `unused_parens`.

fn main() {
    let vec = vec![1, 2, 3];

    for ( elem in vec ) {
        //~^ ERROR expected one of `)`, `,`, or `@`, found `in`
        //~| ERROR unexpected closing `)`
        const RECOVERY_WITNESS: () = 0; //~ ERROR mismatched types
    }
}

@@ -0,0 +1,27 @@
error: expected one of `)`, `,`, or `@`, found `in`
  --> $DIR/recover-for-loop-parens-around-head.rs:10:16
   |
LL |     for ( elem in vec ) {
   |                ^^ expected one of `)`, `,`, or `@` here

error: unexpected closing `)`
  --> $DIR/recover-for-loop-parens-around-head.rs:10:23
   |
LL |     for ( elem in vec ) {
   |         --------------^
   |         |
   |         opening `(`
   |         help: remove parenthesis in `for` loop: `elem in vec`

error[E0308]: mismatched types
  --> $DIR/recover-for-loop-parens-around-head.rs:13:38
   |
LL |         const RECOVERY_WITNESS: () = 0;
   |                                      ^ expected (), found integer
   |
   = note: expected type `()`
              found type `{integer}`

error: aborting due to 3 previous errors

For more information about this error, try `rustc --explain E0308`.

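For comparison, a hypothetical fixed-up loop (not part of the commit) that follows the parser's suggestion and drops the parentheses around the loop head:

fn main() {
    let vec = vec![1, 2, 3];
    // `for $pat in $expr { ... }` -- no parentheses around the loop head.
    for elem in vec {
        println!("{}", elem);
    }
}
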
82 src/test/ui/pattern/rest-pat-semantic-disallowed.rs Normal file

@@ -0,0 +1,82 @@
// Here we test that rest patterns, i.e. `..`, are not allowed
// outside of slice (+ ident patterns witin those), tuple,
// and tuple struct patterns and that duplicates are caught in these contexts.

#![feature(slice_patterns, box_patterns)]

fn main() {}

macro_rules! mk_pat {
    () => { .. } //~ ERROR `..` patterns are not allowed here
}

fn rest_patterns() {
    let mk_pat!();

    // Top level:
    fn foo(..: u8) {} //~ ERROR `..` patterns are not allowed here
    let ..; //~ ERROR `..` patterns are not allowed here

    // Box patterns:
    let box ..; //~ ERROR `..` patterns are not allowed here

    // In or-patterns:
    match 1 {
        1 | .. => {} //~ ERROR `..` patterns are not allowed here
    }

    // Ref patterns:
    let &..; //~ ERROR `..` patterns are not allowed here
    let &mut ..; //~ ERROR `..` patterns are not allowed here

    // Ident patterns:
    let x @ ..; //~ ERROR `..` patterns are not allowed here
    let ref x @ ..; //~ ERROR `..` patterns are not allowed here
    let ref mut x @ ..; //~ ERROR `..` patterns are not allowed here

    // Tuple:
    let (..): (u8,); // OK.
    let (..,): (u8,); // OK.
    let (
        ..,
        .., //~ ERROR `..` can only be used once per tuple pattern
        .. //~ ERROR `..` can only be used once per tuple pattern
    ): (u8, u8, u8);
    let (
        ..,
        x,
        .. //~ ERROR `..` can only be used once per tuple pattern
    ): (u8, u8, u8);

    struct A(u8, u8, u8);

    // Tuple struct (same idea as for tuple patterns):
    let A(..); // OK.
    let A(..,); // OK.
    let A(
        ..,
        .., //~ ERROR `..` can only be used once per tuple struct pattern
        .. //~ ERROR `..` can only be used once per tuple struct pattern
    );
    let A(
        ..,
        x,
        .. //~ ERROR `..` can only be used once per tuple struct pattern
    );

    // Array/Slice:
    let [..]: &[u8]; // OK.
    let [..,]: &[u8]; // OK.
    let [
        ..,
        .., //~ ERROR `..` can only be used once per slice pattern
        .. //~ ERROR `..` can only be used once per slice pattern
    ]: &[u8];
    let [
        ..,
        ref x @ .., //~ ERROR `..` can only be used once per slice pattern
        ref mut y @ .., //~ ERROR `..` can only be used once per slice pattern
        (ref z @ ..), //~ ERROR `..` patterns are not allowed here
        .. //~ ERROR `..` can only be used once per slice pattern
    ]: &[u8];
}

188 src/test/ui/pattern/rest-pat-semantic-disallowed.stderr Normal file

@@ -0,0 +1,188 @@
error: `..` patterns are not allowed here
  --> $DIR/rest-pat-semantic-disallowed.rs:10:13
   |
LL |     () => { .. }
   |             ^^
...
LL |     let mk_pat!();
   |         --------- in this macro invocation
   |
   = note: only allowed in tuple, tuple struct, and slice patterns

error: `..` patterns are not allowed here
  --> $DIR/rest-pat-semantic-disallowed.rs:18:9
   |
LL |     let ..;
   |         ^^
   |
   = note: only allowed in tuple, tuple struct, and slice patterns

error: `..` patterns are not allowed here
  --> $DIR/rest-pat-semantic-disallowed.rs:21:13
   |
LL |     let box ..;
   |             ^^
   |
   = note: only allowed in tuple, tuple struct, and slice patterns

error: `..` patterns are not allowed here
  --> $DIR/rest-pat-semantic-disallowed.rs:25:13
   |
LL |         1 | .. => {}
   |             ^^
   |
   = note: only allowed in tuple, tuple struct, and slice patterns

error: `..` patterns are not allowed here
  --> $DIR/rest-pat-semantic-disallowed.rs:29:10
   |
LL |     let &..;
   |          ^^
   |
   = note: only allowed in tuple, tuple struct, and slice patterns

error: `..` patterns are not allowed here
  --> $DIR/rest-pat-semantic-disallowed.rs:30:14
   |
LL |     let &mut ..;
   |              ^^
   |
   = note: only allowed in tuple, tuple struct, and slice patterns

error: `..` patterns are not allowed here
  --> $DIR/rest-pat-semantic-disallowed.rs:33:13
   |
LL |     let x @ ..;
   |             ^^
   |
   = note: only allowed in tuple, tuple struct, and slice patterns

error: `..` patterns are not allowed here
  --> $DIR/rest-pat-semantic-disallowed.rs:34:17
   |
LL |     let ref x @ ..;
   |                 ^^
   |
   = note: only allowed in tuple, tuple struct, and slice patterns

error: `..` patterns are not allowed here
  --> $DIR/rest-pat-semantic-disallowed.rs:35:21
   |
LL |     let ref mut x @ ..;
   |                     ^^
   |
   = note: only allowed in tuple, tuple struct, and slice patterns

error: `..` can only be used once per tuple pattern
  --> $DIR/rest-pat-semantic-disallowed.rs:42:9
   |
LL |         ..,
   |         -- previously used here
LL |         ..,
   |         ^^ can only be used once per tuple pattern

error: `..` can only be used once per tuple pattern
  --> $DIR/rest-pat-semantic-disallowed.rs:43:9
   |
LL |         ..,
   |         -- previously used here
LL |         ..,
LL |         ..
   |         ^^ can only be used once per tuple pattern

error: `..` can only be used once per tuple pattern
  --> $DIR/rest-pat-semantic-disallowed.rs:48:9
   |
LL |         ..,
   |         -- previously used here
LL |         x,
LL |         ..
   |         ^^ can only be used once per tuple pattern

error: `..` can only be used once per tuple struct pattern
  --> $DIR/rest-pat-semantic-disallowed.rs:58:9
   |
LL |         ..,
   |         -- previously used here
LL |         ..,
   |         ^^ can only be used once per tuple struct pattern

error: `..` can only be used once per tuple struct pattern
  --> $DIR/rest-pat-semantic-disallowed.rs:59:9
   |
LL |         ..,
   |         -- previously used here
LL |         ..,
LL |         ..
   |         ^^ can only be used once per tuple struct pattern

error: `..` can only be used once per tuple struct pattern
  --> $DIR/rest-pat-semantic-disallowed.rs:64:9
   |
LL |         ..,
   |         -- previously used here
LL |         x,
LL |         ..
   |         ^^ can only be used once per tuple struct pattern

error: `..` can only be used once per slice pattern
  --> $DIR/rest-pat-semantic-disallowed.rs:72:9
   |
LL |         ..,
   |         -- previously used here
LL |         ..,
   |         ^^ can only be used once per slice pattern

error: `..` can only be used once per slice pattern
  --> $DIR/rest-pat-semantic-disallowed.rs:73:9
   |
LL |         ..,
   |         -- previously used here
LL |         ..,
LL |         ..
   |         ^^ can only be used once per slice pattern

error: `..` can only be used once per slice pattern
  --> $DIR/rest-pat-semantic-disallowed.rs:77:17
   |
LL |         ..,
   |         -- previously used here
LL |         ref x @ ..,
   |                 ^^ can only be used once per slice pattern

error: `..` can only be used once per slice pattern
  --> $DIR/rest-pat-semantic-disallowed.rs:78:21
   |
LL |         ..,
   |         -- previously used here
LL |         ref x @ ..,
LL |         ref mut y @ ..,
   |                     ^^ can only be used once per slice pattern

error: `..` patterns are not allowed here
  --> $DIR/rest-pat-semantic-disallowed.rs:79:18
   |
LL |         (ref z @ ..),
   |                  ^^
   |
   = note: only allowed in tuple, tuple struct, and slice patterns

error: `..` can only be used once per slice pattern
  --> $DIR/rest-pat-semantic-disallowed.rs:80:9
   |
LL |         ..,
   |         -- previously used here
...
LL |         ..
   |         ^^ can only be used once per slice pattern

error: `..` patterns are not allowed here
  --> $DIR/rest-pat-semantic-disallowed.rs:17:12
   |
LL |     fn foo(..: u8) {}
   |            ^^
   |
   = note: only allowed in tuple, tuple struct, and slice patterns

error: aborting due to 22 previous errors

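As a counterpoint to the errors above, a small sketch of where a single `..` is accepted semantically (assumed example, not taken from the diff; the slice case needs the `slice_patterns` feature on a nightly of this era):

#![feature(slice_patterns)]

struct A(u8, u8, u8);

fn main() {
    // Tuple pattern: one `..` per pattern is fine.
    let (first, ..) = (1u8, 2u8, 3u8);
    assert_eq!(first, 1);

    // Tuple struct pattern.
    let A(.., last) = A(4, 5, 6);
    assert_eq!(last, 6);

    // Slice pattern.
    let [head, ..] = [7u8, 8, 9];
    assert_eq!(head, 7);
}
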
70 src/test/ui/pattern/rest-pat-syntactic.rs Normal file

@@ -0,0 +1,70 @@
// Here we test that `..` is allowed in all pattern locations *syntactically*.
// The semantic test is in `rest-pat-semantic-disallowed.rs`.

// check-pass

fn main() {}

macro_rules! accept_pat {
    ($p:pat) => {}
}

accept_pat!(..);

#[cfg(FALSE)]
fn rest_patterns() {
    // Top level:
    fn foo(..: u8) {}
    let ..;

    // Box patterns:
    let box ..;

    // In or-patterns:
    match x {
        .. | .. => {}
    }

    // Ref patterns:
    let &..;
    let &mut ..;

    // Ident patterns:
    let x @ ..;
    let ref x @ ..;
    let ref mut x @ ..;

    // Tuple:
    let (..); // This is interpreted as a tuple pattern, not a parenthesis one.
    let (..,); // Allowing trailing comma.
    let (.., .., ..); // Duplicates also.
    let (.., P, ..); // Including with things in between.

    // Tuple struct (same idea as for tuple patterns):
    let A(..);
    let A(..,);
    let A(.., .., ..);
    let A(.., P, ..);

    // Array/Slice (like with tuple patterns):
    let [..];
    let [..,];
    let [.., .., ..];
    let [.., P, ..];

    // Random walk to guard against special casing:
    match x {
        .. |
        [
            (
                box ..,
                &(..),
                &mut ..,
                x @ ..
            ),
            ref x @ ..,
        ] |
        ref mut x @ ..
        => {}
    }
}

@@ -6,6 +6,7 @@ license = 'MIT OR Apache-2.0'
 description = """
 Hack for the compiler's own build system
 """
+edition = "2018"

 [lib]
 path = "lib.rs"

45 src/tools/tidy/src/edition.rs Normal file

@@ -0,0 +1,45 @@
//! Tidy check to ensure that crate `edition` is '2018'
//!

use std::path::Path;

fn filter_dirs(path: &Path) -> bool {
    // FIXME: just use super::filter_dirs after the submodules are updated.
    if super::filter_dirs(path) {
        return true;
    }
    let skip = [
        "src/doc/book/second-edition",
        "src/doc/book/2018-edition",
        "src/doc/book/ci/stable-check",
        "src/doc/reference/stable-check",
    ];
    skip.iter().any(|p| path.ends_with(p))
}

fn is_edition_2018(mut line: &str) -> bool {
    line = line.trim();
    line == "edition = \"2018\"" || line == "edition = \'2018\'"
}

pub fn check(path: &Path, bad: &mut bool) {
    super::walk(
        path,
        &mut |path| filter_dirs(path) || path.ends_with("src/test"),
        &mut |entry, contents| {
            let file = entry.path();
            let filename = file.file_name().unwrap();
            if filename != "Cargo.toml" {
                return;
            }
            let has_edition = contents.lines().any(is_edition_2018);
            if !has_edition {
                tidy_error!(
                    bad,
                    "{} doesn't have `edition = \"2018\"` on a separate line",
                    file.display()
                );
            }
        },
    );
}

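A tiny, hypothetical sanity check (not part of the commit) for the `is_edition_2018` helper above, showing which `Cargo.toml` lines the new tidy lint accepts:

fn is_edition_2018(mut line: &str) -> bool {
    line = line.trim();
    line == "edition = \"2018\"" || line == "edition = \'2018\'"
}

fn main() {
    // Whitespace around the key is tolerated and either quote style is accepted.
    assert!(is_edition_2018("  edition = \"2018\"  "));
    assert!(is_edition_2018("edition = '2018'"));
    // A different edition, or anything else on the line, is rejected.
    assert!(!is_edition_2018("edition = \"2015\""));
    assert!(!is_edition_2018("# edition = \"2018\""));
}
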
@@ -34,6 +34,7 @@ pub mod style;
 pub mod errors;
 pub mod features;
 pub mod cargo;
+pub mod edition;
 pub mod pal;
 pub mod deps;
 pub mod extdeps;

@@ -22,6 +22,7 @@ fn main() {
     style::check(&path, &mut bad);
     errors::check(&path, &mut bad);
     cargo::check(&path, &mut bad);
+    edition::check(&path, &mut bad);
     let collected = features::check(&path, &mut bad, verbose);
     pal::check(&path, &mut bad);
     unstable_book::check(&path, collected, &mut bad);

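With the module registered in lib.rs and the call added to main above, the new check runs with the rest of the tidy suite; assuming the standard workflow, it can be exercised locally with:

./x.py test src/tools/tidy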