Auto merge of #80503 - JohnTitor:rollup-b26vglu, r=JohnTitor
Rollup of 13 pull requests

Successful merges:

 - #79812 (Lint on redundant trailing semicolon after item)
 - #80348 (remove redundant clones (clippy::redundant_clone))
 - #80358 (Edit rustc_span documentation)
 - #80457 (Add missing commas to `rustc_ast_pretty::pp` docs)
 - #80461 (Add llvm-libunwind change to bootstrap CHANGELOG)
 - #80464 (Use Option::map_or instead of open coding it)
 - #80465 (Fix typo in ffi-pure.md)
 - #80467 (More uses of the matches! macro)
 - #80469 (Fix small typo in time comment)
 - #80472 (Use sans-serif font for the "all items" page links)
 - #80477 (Make forget intrinsic safe)
 - #80482 (don't clone copy types)
 - #80487 (don't redundantly repeat field names)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
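Two themes recur across these merges: #80467 replaces hand-rolled `match`/`if let` expressions that only produce a `bool` with the `matches!` macro, and #80464 replaces a `match` over an `Option` that supplies a default with `Option::map_or`. A minimal standalone sketch of both rewrites follows; the enum and function names are invented for illustration and do not appear in the compiler sources.

```rust
enum Token {
    Semi,
    Other,
}

// Open-coded: a match that only yields a bool.
fn is_semi_open_coded(t: &Token) -> bool {
    match t {
        Token::Semi => true,
        _ => false,
    }
}

// The same check with the `matches!` macro (the #80467 pattern).
fn is_semi(t: &Token) -> bool {
    matches!(t, Token::Semi)
}

// Open-coded: a match over an Option that supplies a default.
fn links_to_llvm_open_coded(link_name: Option<&str>) -> bool {
    match link_name {
        Some(val) => val.starts_with("llvm."),
        None => false,
    }
}

// The same check with `Option::map_or` (the #80464 pattern).
fn links_to_llvm(link_name: Option<&str>) -> bool {
    link_name.map_or(false, |val| val.starts_with("llvm."))
}

fn main() {
    assert_eq!(is_semi_open_coded(&Token::Semi), is_semi(&Token::Semi));
    assert_eq!(is_semi_open_coded(&Token::Other), is_semi(&Token::Other));
    assert_eq!(links_to_llvm_open_coded(Some("llvm.memcpy")), links_to_llvm(Some("llvm.memcpy")));
    assert_eq!(links_to_llvm_open_coded(None), links_to_llvm(None));
}
```

Both forms behave identically; the rewrites are purely about readability.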
commit d107a87d34

39 changed files with 164 additions and 142 deletions
@@ -1092,15 +1092,9 @@ impl Expr {
         if let ExprKind::Block(ref block, _) = self.kind {
             match block.stmts.last().map(|last_stmt| &last_stmt.kind) {
                 // Implicit return
-                Some(&StmtKind::Expr(_)) => true,
-                Some(&StmtKind::Semi(ref expr)) => {
-                    if let ExprKind::Ret(_) = expr.kind {
-                        // Last statement is explicit return.
-                        true
-                    } else {
-                        false
-                    }
-                }
+                Some(StmtKind::Expr(_)) => true,
+                // Last statement is an explicit return?
+                Some(StmtKind::Semi(expr)) => matches!(expr.kind, ExprKind::Ret(_)),
                 // This is a block that doesn't end in either an implicit or explicit return.
                 _ => false,
             }
@@ -1950,7 +1944,7 @@ impl TyKind {
     }

     pub fn is_unit(&self) -> bool {
-        if let TyKind::Tup(ref tys) = *self { tys.is_empty() } else { false }
+        matches!(self, TyKind::Tup(tys) if tys.is_empty())
     }
 }

@@ -1857,12 +1857,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
             output,
             c_variadic,
             implicit_self: decl.inputs.get(0).map_or(hir::ImplicitSelfKind::None, |arg| {
-                let is_mutable_pat = match arg.pat.kind {
-                    PatKind::Ident(BindingMode::ByValue(mt) | BindingMode::ByRef(mt), _, _) => {
-                        mt == Mutability::Mut
-                    }
-                    _ => false,
-                };
+                use BindingMode::{ByRef, ByValue};
+                let is_mutable_pat = matches!(
+                    arg.pat.kind,
+                    PatKind::Ident(ByValue(Mutability::Mut) | ByRef(Mutability::Mut), ..)
+                );

                 match arg.ty.kind {
                     TyKind::ImplicitSelf if is_mutable_pat => hir::ImplicitSelfKind::Mut,
@@ -397,10 +397,8 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
         match i.kind {
             ast::ForeignItemKind::Fn(..) | ast::ForeignItemKind::Static(..) => {
                 let link_name = self.sess.first_attr_value_str_by_name(&i.attrs, sym::link_name);
-                let links_to_llvm = match link_name {
-                    Some(val) => val.as_str().starts_with("llvm."),
-                    _ => false,
-                };
+                let links_to_llvm =
+                    link_name.map_or(false, |val| val.as_str().starts_with("llvm."));
                 if links_to_llvm {
                     gate_feature_post!(
                         &self,
@@ -75,7 +75,7 @@
 //! breaking inconsistently to become
 //!
 //! ```
-//! foo(hello, there
+//! foo(hello, there,
 //!     good, friends);
 //! ```
 //!
@@ -83,7 +83,7 @@
 //!
 //! ```
 //! foo(hello,
-//!     there
+//!     there,
 //!     good,
 //!     friends);
 //! ```
@@ -116,7 +116,7 @@ pub struct NativeLib {

 impl From<&cstore::NativeLib> for NativeLib {
     fn from(lib: &cstore::NativeLib) -> Self {
-        NativeLib { kind: lib.kind.clone(), name: lib.name.clone(), cfg: lib.cfg.clone() }
+        NativeLib { kind: lib.kind, name: lib.name, cfg: lib.cfg.clone() }
     }
 }

@@ -523,7 +523,7 @@ where
                     successors_len: 0,
                     min_depth: depth,
                     min_cycle_root: successor_node,
-                    successor_node: successor_node,
+                    successor_node,
                 });
                 continue 'recurse;
             }
@@ -1317,7 +1317,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
         T: TypeFoldable<'tcx>,
     {
         if !value.needs_infer() {
-            return value.clone(); // Avoid duplicated subst-folding.
+            return value; // Avoid duplicated subst-folding.
         }
         let mut r = resolve::OpportunisticVarResolver::new(self);
         value.fold_with(&mut r)
@@ -28,27 +28,19 @@ declare_lint_pass!(RedundantSemicolons => [REDUNDANT_SEMICOLONS]);

 impl EarlyLintPass for RedundantSemicolons {
     fn check_block(&mut self, cx: &EarlyContext<'_>, block: &Block) {
-        let mut after_item_stmt = false;
         let mut seq = None;
         for stmt in block.stmts.iter() {
             match (&stmt.kind, &mut seq) {
                 (StmtKind::Empty, None) => seq = Some((stmt.span, false)),
                 (StmtKind::Empty, Some(seq)) => *seq = (seq.0.to(stmt.span), true),
-                (_, seq) => {
-                    maybe_lint_redundant_semis(cx, seq, after_item_stmt);
-                    after_item_stmt = matches!(stmt.kind, StmtKind::Item(_));
-                }
+                (_, seq) => maybe_lint_redundant_semis(cx, seq),
             }
         }
-        maybe_lint_redundant_semis(cx, &mut seq, after_item_stmt);
+        maybe_lint_redundant_semis(cx, &mut seq);
     }
 }

-fn maybe_lint_redundant_semis(
-    cx: &EarlyContext<'_>,
-    seq: &mut Option<(Span, bool)>,
-    after_item_stmt: bool,
-) {
+fn maybe_lint_redundant_semis(cx: &EarlyContext<'_>, seq: &mut Option<(Span, bool)>) {
     if let Some((span, multiple)) = seq.take() {
         // FIXME: Find a better way of ignoring the trailing
         // semicolon from macro expansion
@@ -56,12 +48,6 @@ fn maybe_lint_redundant_semis(
             return;
         }

-        // FIXME: Lint on semicolons after item statements
-        // once doing so doesn't break bootstrapping
-        if after_item_stmt {
-            return;
-        }
-
         cx.struct_span_lint(REDUNDANT_SEMICOLONS, span, |lint| {
             let (msg, rem) = if multiple {
                 ("unnecessary trailing semicolons", "remove these semicolons")
@@ -110,10 +110,7 @@ impl<'tcx> PlaceWithHirId<'tcx> {
         base: PlaceBase,
         projections: Vec<Projection<'tcx>>,
     ) -> PlaceWithHirId<'tcx> {
-        PlaceWithHirId {
-            hir_id: hir_id,
-            place: Place { base_ty: base_ty, base: base, projections: projections },
-        }
+        PlaceWithHirId { hir_id, place: Place { base_ty, base, projections } }
     }
 }

@@ -306,13 +306,13 @@ macro_rules! make_mir_visitor {

                 let mut index = 0;
                 for statement in statements {
-                    let location = Location { block: block, statement_index: index };
+                    let location = Location { block, statement_index: index };
                     self.visit_statement(statement, location);
                     index += 1;
                 }

                 if let Some(terminator) = terminator {
-                    let location = Location { block: block, statement_index: index };
+                    let location = Location { block, statement_index: index };
                     self.visit_terminator(terminator, location);
                 }
             }
@@ -1634,7 +1634,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {

         let layout = tcx.intern_layout(Layout {
             variants: Variants::Multiple {
-                tag: tag,
+                tag,
                 tag_encoding: TagEncoding::Direct,
                 tag_field: tag_index,
                 variants,
@@ -328,8 +328,8 @@ struct SplitIntRange {
 }

 impl SplitIntRange {
-    fn new(r: IntRange) -> Self {
-        SplitIntRange { range: r.clone(), borders: Vec::new() }
+    fn new(range: IntRange) -> Self {
+        SplitIntRange { range, borders: Vec::new() }
     }

     /// Internal use
@@ -4,24 +4,25 @@ use std::str::FromStr;

 use rustc_macros::HashStable_Generic;

-/// The edition of the compiler (RFC 2052)
+/// The edition of the compiler. (See [RFC 2052](https://github.com/rust-lang/rfcs/blob/master/text/2052-epochs.md).)
 #[derive(Clone, Copy, Hash, PartialEq, PartialOrd, Debug, Encodable, Decodable, Eq)]
 #[derive(HashStable_Generic)]
 pub enum Edition {
-    // editions must be kept in order, oldest to newest
+    // When adding new editions, be sure to do the following:
+    //
+    // - update the `ALL_EDITIONS` const
+    // - update the `EDITION_NAME_LIST` const
+    // - add a `rust_####()` function to the session
+    // - update the enum in Cargo's sources as well
+    //
+    // Editions *must* be kept in order, oldest to newest.
     /// The 2015 edition
     Edition2015,
     /// The 2018 edition
     Edition2018,
-    // when adding new editions, be sure to update:
-    //
-    // - Update the `ALL_EDITIONS` const
-    // - Update the EDITION_NAME_LIST const
-    // - add a `rust_####()` function to the session
-    // - update the enum in Cargo's sources as well
 }

-// must be in order from oldest to newest
+// Must be in order from oldest to newest.
 pub const ALL_EDITIONS: &[Edition] = &[Edition::Edition2015, Edition::Edition2018];

 pub const EDITION_NAME_LIST: &str = "2015|2018";
@@ -1,10 +1,16 @@
+//! Levenshtein distances.
+//!
+//! The [Levenshtein distance] is a metric for measuring the difference between two strings.
+//!
+//! [Levenshtein distance]: https://en.wikipedia.org/wiki/Levenshtein_distance
+
 use crate::symbol::Symbol;
 use std::cmp;

 #[cfg(test)]
 mod tests;

-/// Finds the Levenshtein distance between two strings
+/// Finds the Levenshtein distance between two strings.
 pub fn lev_distance(a: &str, b: &str) -> usize {
     // cases which don't require further computation
     if a.is_empty() {
@@ -35,14 +41,14 @@ pub fn lev_distance(a: &str, b: &str) -> usize {
     dcol[t_last + 1]
 }

-/// Finds the best match for a given word in the given iterator
+/// Finds the best match for a given word in the given iterator.
 ///
 /// As a loose rule to avoid the obviously incorrect suggestions, it takes
 /// an optional limit for the maximum allowable edit distance, which defaults
 /// to one-third of the given word.
 ///
-/// Besides Levenshtein, we use case insensitive comparison to improve accuracy on an edge case with
-/// a lower(upper)case letters mismatch.
+/// Besides Levenshtein, we use case insensitive comparison to improve accuracy
+/// on an edge case with a lower(upper)case letters mismatch.
 #[cold]
 pub fn find_best_match_for_name(
     name_vec: &[Symbol],
@@ -98,7 +104,7 @@ fn find_match_by_sorted_words(iter_names: &[Symbol], lookup: &str) -> Option<Sym

 fn sort_by_words(name: &str) -> String {
     let mut split_words: Vec<&str> = name.split('_').collect();
-    // We are sorting primitive &strs and can use unstable sort here
+    // We are sorting primitive &strs and can use unstable sort here.
     split_words.sort_unstable();
     split_words.join("_")
 }
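As an aside, the Levenshtein metric documented in the hunks above can be written as a short standalone function. The following is a textbook dynamic-programming sketch for illustration only, not the implementation used by `rustc_span`:

```rust
// Single-row dynamic-programming Levenshtein distance (illustrative sketch).
fn lev_distance(a: &str, b: &str) -> usize {
    let b_chars: Vec<char> = b.chars().collect();
    // dcol[j] = distance between the prefix of `a` processed so far and b[..j].
    let mut dcol: Vec<usize> = (0..=b_chars.len()).collect();
    for (i, a_ch) in a.chars().enumerate() {
        let mut prev = dcol[0];
        dcol[0] = i + 1;
        for (j, &b_ch) in b_chars.iter().enumerate() {
            let cur = dcol[j + 1];
            let cost = if a_ch == b_ch { 0 } else { 1 };
            // Minimum of substitution, deletion, and insertion.
            dcol[j + 1] = (prev + cost).min(cur + 1).min(dcol[j] + 1);
            prev = cur;
        }
    }
    dcol[b_chars.len()]
}

fn main() {
    assert_eq!(lev_distance("kitten", "sitting"), 3);
    assert_eq!(lev_distance("", "abc"), 3);
    assert_eq!(lev_distance("same", "same"), 0);
    println!("ok");
}
```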
@@ -1,4 +1,13 @@
-//! The source positions and related helper functions.
+//! Source positions and related helper functions.
+//!
+//! Important concepts in this module include:
+//!
+//! - the *span*, represented by [`SpanData`] and related types;
+//! - source code as represented by a [`SourceMap`]; and
+//! - interned strings, represented by [`Symbol`]s, with some common symbols available statically in the [`sym`] module.
+//!
+//! Unlike most compilers, the span contains not only the position in the source code, but also various other metadata,
+//! such as the edition and macro hygiene. This metadata is stored in [`SyntaxContext`] and [`ExpnData`].
 //!
 //! ## Note
 //!
@@ -124,7 +133,7 @@ pub enum RealFileName {

 impl RealFileName {
     /// Returns the path suitable for reading from the file system on the local host.
-    /// Avoid embedding this in build artifacts; see `stable_name` for that.
+    /// Avoid embedding this in build artifacts; see `stable_name()` for that.
     pub fn local_path(&self) -> &Path {
         match self {
             RealFileName::Named(p)
@@ -133,7 +142,7 @@ impl RealFileName {
     }

     /// Returns the path suitable for reading from the file system on the local host.
-    /// Avoid embedding this in build artifacts; see `stable_name` for that.
+    /// Avoid embedding this in build artifacts; see `stable_name()` for that.
     pub fn into_local_path(self) -> PathBuf {
         match self {
             RealFileName::Named(p)
@@ -143,7 +152,7 @@ impl RealFileName {

     /// Returns the path suitable for embedding into build artifacts. Note that
     /// a virtualized path will not correspond to a valid file system path; see
-    /// `local_path` for something that is more likely to return paths into the
+    /// `local_path()` for something that is more likely to return paths into the
     /// local host file system.
     pub fn stable_name(&self) -> &Path {
         match self {
@@ -173,7 +182,7 @@ pub enum FileName {
     /// Custom sources for explicit parser calls from plugins and drivers.
     Custom(String),
     DocTest(PathBuf, isize),
-    /// Post-substitution inline assembly from LLVM
+    /// Post-substitution inline assembly from LLVM.
     InlineAsm(u64),
 }

@@ -266,14 +275,17 @@ impl FileName {
     }
 }

+/// Represents a span.
+///
 /// Spans represent a region of code, used for error reporting. Positions in spans
-/// are *absolute* positions from the beginning of the source_map, not positions
-/// relative to `SourceFile`s. Methods on the `SourceMap` can be used to relate spans back
+/// are *absolute* positions from the beginning of the [`SourceMap`], not positions
+/// relative to [`SourceFile`]s. Methods on the `SourceMap` can be used to relate spans back
 /// to the original source.
-/// You must be careful if the span crosses more than one file - you will not be
+///
+/// You must be careful if the span crosses more than one file, since you will not be
 /// able to use many of the functions on spans in source_map and you cannot assume
-/// that the length of the `span = hi - lo`; there may be space in the `BytePos`
-/// range between files.
+/// that the length of the span is equal to `span.hi - span.lo`; there may be space in the
+/// [`BytePos`] range between files.
 ///
 /// `SpanData` is public because `Span` uses a thread-local interner and can't be
 /// sent to other threads, but some pieces of performance infra run in a separate thread.
@@ -384,7 +396,7 @@ impl Span {
         Span::new(lo, hi, SyntaxContext::root())
     }

-    /// Returns a new span representing an empty span at the beginning of this span
+    /// Returns a new span representing an empty span at the beginning of this span.
     #[inline]
     pub fn shrink_to_lo(self) -> Span {
         let span = self.data();
@@ -398,7 +410,7 @@ impl Span {
     }

     #[inline]
-    /// Returns true if hi == lo
+    /// Returns `true` if `hi == lo`.
     pub fn is_empty(&self) -> bool {
         let span = self.data();
         span.hi == span.lo
@@ -512,7 +524,7 @@ impl Span {
     }

     /// Checks if a span is "internal" to a macro in which `unsafe`
-    /// can be used without triggering the `unsafe_code` lint
+    /// can be used without triggering the `unsafe_code` lint.
     // (that is, a macro marked with `#[allow_internal_unsafe]`).
     pub fn allows_unsafe(&self) -> bool {
         self.ctxt().outer_expn_data().allow_internal_unsafe
@@ -700,6 +712,7 @@ impl Span {
     }
 }

+/// A span together with some additional data.
 #[derive(Clone, Debug)]
 pub struct SpanLabel {
     /// The span we are going to include in the final snippet.
@@ -743,7 +756,7 @@ impl<D: Decoder> Decodable<D> for Span {
 /// any spans that are debug-printed during the closure's execution.
 ///
 /// Normally, the global `TyCtxt` is used to retrieve the `SourceMap`
-/// (see `rustc_interface::callbacks::span_debug1). However, some parts
+/// (see `rustc_interface::callbacks::span_debug1`). However, some parts
 /// of the compiler (e.g. `rustc_parse`) may debug-print `Span`s before
 /// a `TyCtxt` is available. In this case, we fall back to
 /// the `SourceMap` provided to this function. If that is not available,
@@ -994,9 +1007,9 @@ pub enum ExternalSource {
     Unneeded,
     Foreign {
         kind: ExternalSourceKind,
-        /// This SourceFile's byte-offset within the source_map of its original crate
+        /// This SourceFile's byte-offset within the source_map of its original crate.
         original_start_pos: BytePos,
-        /// The end of this SourceFile within the source_map of its original crate
+        /// The end of this SourceFile within the source_map of its original crate.
         original_end_pos: BytePos,
     },
 }
@@ -1099,7 +1112,7 @@ impl SourceFileHash {
     }
 }

-/// A single source in the `SourceMap`.
+/// A single source in the [`SourceMap`].
 #[derive(Clone)]
 pub struct SourceFile {
     /// The name of the file that the source came from. Source that doesn't
@@ -1580,7 +1593,7 @@ fn remove_bom(src: &mut String, normalized_pos: &mut Vec<NormalizedPos>) {

 /// Replaces `\r\n` with `\n` in-place in `src`.
 ///
-/// Returns error if there's a lone `\r` in the string
+/// Returns error if there's a lone `\r` in the string.
 fn normalize_newlines(src: &mut String, normalized_pos: &mut Vec<NormalizedPos>) {
     if !src.as_bytes().contains(&b'\r') {
         return;
@@ -1705,13 +1718,16 @@ macro_rules! impl_pos {
 }

 impl_pos! {
-    /// A byte offset. Keep this small (currently 32-bits), as AST contains
-    /// a lot of them.
+    /// A byte offset.
+    ///
+    /// Keep this small (currently 32-bits), as AST contains a lot of them.
     #[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
     pub struct BytePos(pub u32);

-    /// A character offset. Because of multibyte UTF-8 characters, a byte offset
-    /// is not equivalent to a character offset. The `SourceMap` will convert `BytePos`
+    /// A character offset.
+    ///
+    /// Because of multibyte UTF-8 characters, a byte offset
+    /// is not equivalent to a character offset. The [`SourceMap`] will convert [`BytePos`]
     /// values to `CharPos` values as necessary.
     #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
     pub struct CharPos(pub usize);
@@ -1835,8 +1851,9 @@ fn lookup_line(lines: &[BytePos], pos: BytePos) -> isize {
 }

 /// Requirements for a `StableHashingContext` to be used in this crate.
-/// This is a hack to allow using the `HashStable_Generic` derive macro
-/// instead of implementing everything in librustc_middle.
+///
+/// This is a hack to allow using the [`HashStable_Generic`] derive macro
+/// instead of implementing everything in rustc_middle.
 pub trait HashStableContext {
     fn hash_def_id(&mut self, _: DefId, hasher: &mut StableHasher);
     fn hash_crate_num(&mut self, _: CrateNum, hasher: &mut StableHasher);
@@ -1856,6 +1873,7 @@ where
 /// offsets into the `SourceMap`). Instead, we hash the (file name, line, column)
 /// triple, which stays the same even if the containing `SourceFile` has moved
 /// within the `SourceMap`.
+///
 /// Also note that we are hashing byte offsets for the column, not unicode
 /// codepoint offsets. For the purpose of the hash that's sufficient.
 /// Also, hashing filenames is expensive so we avoid doing it twice when the
@@ -1,9 +1,11 @@
-//! The `SourceMap` tracks all the source code used within a single crate, mapping
+//! Types for tracking pieces of source code within a crate.
+//!
+//! The [`SourceMap`] tracks all the source code used within a single crate, mapping
 //! from integer byte positions to the original source code location. Each bit
 //! of source parsed during crate parsing (typically files, in-memory strings,
 //! or various bits of macro expansion) cover a continuous range of bytes in the
-//! `SourceMap` and are represented by `SourceFile`s. Byte positions are stored in
-//! `Span` and used pervasively in the compiler. They are absolute positions
+//! `SourceMap` and are represented by [`SourceFile`]s. Byte positions are stored in
+//! [`Span`] and used pervasively in the compiler. They are absolute positions
 //! within the `SourceMap`, which upon request can be converted to line and column
 //! information, source code snippets, etc.

@@ -12,7 +12,7 @@ use rustc_data_structures::fx::FxIndexSet;

 /// A compressed span.
 ///
-/// `SpanData` is 12 bytes, which is a bit too big to stick everywhere. `Span`
+/// Whereas [`SpanData`] is 12 bytes, which is a bit too big to stick everywhere, `Span`
 /// is a form that only takes up 8 bytes, with less space for the length and
 /// context. The vast majority (99.9%+) of `SpanData` instances will fit within
 /// those 8 bytes; any `SpanData` whose fields don't fit into a `Span` are
@@ -42,13 +42,11 @@ use rustc_data_structures::fx::FxIndexSet;
 /// - `base` is 32 bits in both `Span` and `SpanData`, which means that `base`
 ///   values never cause interning. The number of bits needed for `base`
 ///   depends on the crate size. 32 bits allows up to 4 GiB of code in a crate.
-///   `script-servo` is the largest crate in `rustc-perf`, requiring 26 bits
-///   for some spans.
 /// - `len` is 15 bits in `Span` (a u16, minus 1 bit for the tag) and 32 bits
 ///   in `SpanData`, which means that large `len` values will cause interning.
 ///   The number of bits needed for `len` does not depend on the crate size.
-///   The most common number of bits for `len` are 0--7, with a peak usually at
-///   3 or 4, and then it drops off quickly from 8 onwards. 15 bits is enough
+///   The most common numbers of bits for `len` are from 0 to 7, with a peak usually
+///   at 3 or 4, and then it drops off quickly from 8 onwards. 15 bits is enough
 ///   for 99.99%+ of cases, but larger values (sometimes 20+ bits) might occur
 ///   dozens of times in a typical crate.
 /// - `ctxt` is 16 bits in `Span` and 32 bits in `SpanData`, which means that
@@ -92,6 +92,7 @@ pub fn intrinsic_operation_unsafety(intrinsic: Symbol) -> hir::Unsafety {
         | sym::rustc_peek
         | sym::maxnumf64
         | sym::type_name
+        | sym::forget
         | sym::variant_count => hir::Unsafety::Normal,
         _ => hir::Unsafety::Unsafe,
     }
@@ -390,7 +390,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {

             let min_cap_list = match root_var_min_capture_list.get_mut(&var_hir_id) {
                 None => {
-                    let min_cap_list = vec![ty::CapturedPlace { place: place, info: capture_info }];
+                    let min_cap_list = vec![ty::CapturedPlace { place, info: capture_info }];
                     root_var_min_capture_list.insert(var_hir_id, min_cap_list);
                     continue;
                 }
@@ -459,7 +459,7 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> {
         kind: ProjectionKind,
     ) -> PlaceWithHirId<'tcx> {
         let mut projections = base_place.place.projections;
-        projections.push(Projection { kind: kind, ty: ty });
+        projections.push(Projection { kind, ty });
         let ret = PlaceWithHirId::new(
             node.hir_id(),
             base_place.place.base_ty,
@@ -151,9 +151,14 @@ pub const fn forget<T>(t: T) {
 #[inline]
 #[unstable(feature = "forget_unsized", issue = "none")]
 pub fn forget_unsized<T: ?Sized>(t: T) {
+    #[cfg(bootstrap)]
     // SAFETY: the forget intrinsic could be safe, but there's no point in making it safe since
     // we'll be implementing this function soon via `ManuallyDrop`
-    unsafe { intrinsics::forget(t) }
+    unsafe {
+        intrinsics::forget(t)
+    }
+    #[cfg(not(bootstrap))]
+    intrinsics::forget(t)
 }

 /// Returns the size of a type in bytes.
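As an aside on #80477: making the underlying `forget` intrinsic safe does not change the user-facing behavior of `std::mem::forget`, which is already a safe function that takes ownership of a value and skips its destructor. A small illustrative example of that behavior (not part of this commit):

```rust
use std::mem;

struct Noisy;

impl Drop for Noisy {
    fn drop(&mut self) {
        println!("dropped");
    }
}

fn main() {
    let kept = Noisy;
    let forgotten = Noisy;
    drop(kept); // prints "dropped"
    mem::forget(forgotten); // takes ownership; Drop::drop is never called
}
```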
@@ -237,7 +237,7 @@ mod inner {
     // `denom` field.
     //
     // Encoding this as a single `AtomicU64` allows us to use `Relaxed`
-    // operations, as we are only interested in in the effects on a single
+    // operations, as we are only interested in the effects on a single
     // memory location.
     static INFO_BITS: AtomicU64 = AtomicU64::new(0);

@@ -4,7 +4,12 @@ All notable changes to bootstrap will be documented in this file.

 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

-## [Non-breaking changes since the last major version]
+## [Changes since the last major version]
+
+- `llvm-libunwind` now accepts `in-tree` (formerly true), `system` or `no` (formerly false) [#77703](https://github.com/rust-lang/rust/pull/77703)
+
+### Non-breaking changes

 - `x.py check` needs opt-in to check tests (--all-targets) [#77473](https://github.com/rust-lang/rust/pull/77473)
 - The default bootstrap profiles are now located at `bootstrap/defaults/config.$PROFILE.toml` (previously they were located at `bootstrap/defaults/config.toml.$PROFILE`) [#77558](https://github.com/rust-lang/rust/pull/77558)
@@ -31,7 +31,7 @@ parameters (e.g. pointers), globals, etc. `#[ffi_pure]` functions are not
 referentially-transparent, and are therefore more relaxed than `#[ffi_const]`
 functions.

-However, accesing global memory through volatile or atomic reads can violate the
+However, accessing global memory through volatile or atomic reads can violate the
 requirement that two consecutive function calls shall return the same value.

 A `pure` function that returns unit has no effect on the abstract machine's
@@ -597,7 +597,7 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> {
                     ref mut bindings, ..
                 } => {
                     bindings.push(TypeBinding {
-                        name: left_name.clone(),
+                        name: left_name,
                         kind: TypeBindingKind::Equality { ty: rhs },
                     });
                 }
@@ -665,7 +665,7 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> {
                 GenericParamDefKind::Type { ref mut default, ref mut bounds, .. } => {
                     // We never want something like `impl<T=Foo>`.
                     default.take();
-                    let generic_ty = Type::Generic(param.name.clone());
+                    let generic_ty = Type::Generic(param.name);
                     if !has_sized.contains(&generic_ty) {
                         bounds.insert(0, GenericBound::maybe_sized(self.cx));
                     }
@@ -831,7 +831,7 @@ impl<'a, 'tcx> Clean<Generics> for (&'a ty::Generics, ty::GenericPredicates<'tcx
         where_predicates.retain(|pred| match *pred {
             WP::BoundPredicate { ty: Generic(ref g), ref bounds } => {
                 if bounds.iter().any(|b| b.is_sized_bound(cx)) {
-                    sized_params.insert(g.clone());
+                    sized_params.insert(*g);
                     false
                 } else {
                     true
@@ -847,7 +847,7 @@ impl<'a, 'tcx> Clean<Generics> for (&'a ty::Generics, ty::GenericPredicates<'tcx
                 && !sized_params.contains(&tp.name)
             {
                 where_predicates.push(WP::BoundPredicate {
-                    ty: Type::Generic(tp.name.clone()),
+                    ty: Type::Generic(tp.name),
                     bounds: vec![GenericBound::maybe_sized(cx)],
                 })
             }
@@ -74,7 +74,7 @@ crate fn krate(mut cx: &mut DocContext<'_>) -> Crate {
             )
         }));
         m.items.extend(keywords.into_iter().map(|(def_id, kw)| {
-            Item::from_def_id_and_parts(def_id, Some(kw.clone()), ItemKind::KeywordItem(kw), cx)
+            Item::from_def_id_and_parts(def_id, Some(kw), ItemKind::KeywordItem(kw), cx)
         }));
     }

@@ -307,7 +307,7 @@ crate fn strip_path(path: &Path) -> Path {
         .segments
         .iter()
         .map(|s| PathSegment {
-            name: s.name.clone(),
+            name: s.name,
             args: GenericArgs::AngleBracketed { args: vec![], bindings: vec![] },
         })
         .collect();
@@ -558,12 +558,12 @@ crate fn make_test(
                     "fn main() {{ {}fn {}() -> Result<(), impl core::fmt::Debug> {{\n",
                     inner_attr, inner_fn_name
                 ),
-                format!("\n}}; {}().unwrap() }}", inner_fn_name),
+                format!("\n}} {}().unwrap() }}", inner_fn_name),
             )
         } else if test_id.is_some() {
             (
                 format!("fn main() {{ {}fn {}() {{\n", inner_attr, inner_fn_name),
-                format!("\n}}; {}() }}", inner_fn_name),
+                format!("\n}} {}() }}", inner_fn_name),
             )
         } else {
             ("fn main() {\n".into(), "\n}".into())
@@ -292,7 +292,7 @@ use std::io;
 let mut input = String::new();
 io::stdin().read_line(&mut input)?;
 Ok::<(), io:Error>(())
-}; _inner().unwrap() }"
+} _inner().unwrap() }"
     .to_string();
     let (output, len, _) = make_test(input, None, false, &opts, DEFAULT_EDITION, None);
     assert_eq!((output, len), (expected, 2));
@@ -306,7 +306,7 @@ fn make_test_named_wrapper() {
     let expected = "#![allow(unused)]
 fn main() { #[allow(non_snake_case)] fn _doctest_main__some_unique_name() {
 assert_eq!(2+2, 4);
-}; _doctest_main__some_unique_name() }"
+} _doctest_main__some_unique_name() }"
     .to_string();
     let (output, len, _) =
         make_test(input, None, false, &opts, DEFAULT_EDITION, Some("_some_unique_name"));
@@ -1027,7 +1027,7 @@ fn markdown_summary_with_limit(md: &str, length_limit: usize) -> (String, bool)
     fn push(s: &mut String, text_length: &mut usize, text: &str) {
         s.push_str(text);
         *text_length += text.len();
-    };
+    }

     'outer: for event in Parser::new_ext(md, summary_opts()) {
         match &event {
@@ -538,7 +538,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
     fn after_krate(&mut self, krate: &clean::Crate, cache: &Cache) -> Result<(), Error> {
         let final_file = self.dst.join(&*krate.name.as_str()).join("all.html");
         let settings_file = self.dst.join("settings.html");
-        let crate_name = krate.name.clone();
+        let crate_name = krate.name;

         let mut root_path = self.dst.to_str().expect("invalid path").to_owned();
         if !root_path.ends_with('/') {
@@ -3967,7 +3967,7 @@ fn render_impl(
         cache: &Cache,
     ) {
         for trait_item in &t.items {
-            let n = trait_item.name.clone();
+            let n = trait_item.name;
             if i.items.iter().any(|m| m.name == n) {
                 continue;
             }
@@ -122,7 +122,9 @@ h3.impl, h3.method, h3.type {
 h1, h2, h3, h4,
 .sidebar, a.source, .search-input, .content table td:first-child > a,
 .collapse-toggle, div.item-list .out-of-band,
-#source-sidebar, #sidebar-toggle {
+#source-sidebar, #sidebar-toggle,
+/* This selector is for the items listed in the "all items" page. */
+#main > ul.docblock > li > a {
 	font-family: "Fira Sans", sans-serif;
 }

@@ -1234,7 +1234,7 @@ impl LinkCollector<'_, '_> {
     ) -> Option<(Res, Option<String>)> {
         // Try to look up both the result and the corresponding side channel value
         if let Some(ref cached) = self.visited_links.get(&key) {
-            self.kind_side_channel.set(cached.side_channel.clone());
+            self.kind_side_channel.set(cached.side_channel);
             return Some(cached.res.clone());
         }

@@ -4,27 +4,21 @@
   fn forget(_1: T) -> () {
       debug t => _1; // in scope 0 at $DIR/lower_intrinsics.rs:18:18: 18:19
       let mut _0: (); // return place in scope 0 at $DIR/lower_intrinsics.rs:18:24: 18:24
-      let _2: (); // in scope 0 at $DIR/lower_intrinsics.rs:19:14: 19:41
-      let mut _3: T; // in scope 0 at $DIR/lower_intrinsics.rs:19:39: 19:40
-      scope 1 {
-      }
+      let mut _2: T; // in scope 0 at $DIR/lower_intrinsics.rs:19:30: 19:31

       bb0: {
-          StorageLive(_2); // scope 0 at $DIR/lower_intrinsics.rs:19:5: 19:43
-          StorageLive(_3); // scope 1 at $DIR/lower_intrinsics.rs:19:39: 19:40
-          _3 = move _1; // scope 1 at $DIR/lower_intrinsics.rs:19:39: 19:40
--         _2 = std::intrinsics::forget::<T>(move _3) -> bb1; // scope 1 at $DIR/lower_intrinsics.rs:19:14: 19:41
+          StorageLive(_2); // scope 0 at $DIR/lower_intrinsics.rs:19:30: 19:31
+          _2 = move _1; // scope 0 at $DIR/lower_intrinsics.rs:19:30: 19:31
+-         _0 = std::intrinsics::forget::<T>(move _2) -> bb1; // scope 0 at $DIR/lower_intrinsics.rs:19:5: 19:32
 -         // mir::Constant
--         // + span: $DIR/lower_intrinsics.rs:19:14: 19:38
--         // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(T) {std::intrinsics::forget::<T>}, val: Value(Scalar(<ZST>)) }
-+         _2 = const (); // scope 1 at $DIR/lower_intrinsics.rs:19:14: 19:41
-+         goto -> bb1; // scope 1 at $DIR/lower_intrinsics.rs:19:14: 19:41
+-         // + span: $DIR/lower_intrinsics.rs:19:5: 19:29
+-         // + literal: Const { ty: extern "rust-intrinsic" fn(T) {std::intrinsics::forget::<T>}, val: Value(Scalar(<ZST>)) }
++         _0 = const (); // scope 0 at $DIR/lower_intrinsics.rs:19:5: 19:32
++         goto -> bb1; // scope 0 at $DIR/lower_intrinsics.rs:19:5: 19:32
       }

       bb1: {
-          StorageDead(_3); // scope 1 at $DIR/lower_intrinsics.rs:19:40: 19:41
-          StorageDead(_2); // scope 0 at $DIR/lower_intrinsics.rs:19:43: 19:44
-          _0 = const (); // scope 0 at $DIR/lower_intrinsics.rs:18:24: 20:2
+          StorageDead(_2); // scope 0 at $DIR/lower_intrinsics.rs:19:31: 19:32
           goto -> bb2; // scope 0 at $DIR/lower_intrinsics.rs:20:1: 20:2
       }

@@ -16,7 +16,7 @@ pub fn size_of<T>() -> usize {

 // EMIT_MIR lower_intrinsics.forget.LowerIntrinsics.diff
 pub fn forget<T>(t: T) {
-    unsafe { core::intrinsics::forget(t) };
+    core::intrinsics::forget(t)
 }

 // EMIT_MIR lower_intrinsics.unreachable.LowerIntrinsics.diff
@@ -1,10 +1,6 @@
-// check-pass
-// This test should stop compiling
-// we decide to enable this lint for item statements.
-
 #![deny(redundant_semicolons)]

 fn main() {
-    fn inner() {};
-    struct Bar {};
+    fn inner() {}; //~ ERROR unnecessary
+    struct Bar {}; //~ ERROR unnecessary
 }
src/test/ui/lint/redundant-semicolon/item-stmt-semi.stderr (new file, 20 additions)
@@ -0,0 +1,20 @@
+error: unnecessary trailing semicolon
+  --> $DIR/item-stmt-semi.rs:4:18
+   |
+LL |     fn inner() {};
+   |                  ^ help: remove this semicolon
+   |
+note: the lint level is defined here
+  --> $DIR/item-stmt-semi.rs:1:9
+   |
+LL | #![deny(redundant_semicolons)]
+   |         ^^^^^^^^^^^^^^^^^^^^
+
+error: unnecessary trailing semicolon
+  --> $DIR/item-stmt-semi.rs:5:18
+   |
+LL |     struct Bar {};
+   |                  ^ help: remove this semicolon
+
+error: aborting due to 2 previous errors
+
@@ -39,7 +39,7 @@ fn main() {
         B(i32),
         C,
         D,
-    };
+    }
     let x = E::A(2);
     {
         // lint
@@ -51,7 +51,7 @@ fn main() {
         B(i32),
         C,
         D,
-    };
+    }
     let x = E::A(2);
     {
         // lint