
Ignore automatically derived impls of Clone and Debug in dead code analysis

Author: Fabian Wolff, 2021-05-21 19:35:49 +02:00
parent 497ee321af
commit 79adda930f
54 changed files with 225 additions and 107 deletions
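In practice, the change means that a struct field which is only touched by `#[derive(Clone)]`/`#[derive(Debug)]` expansions is now reported as dead. A minimal sketch of the user-visible effect, adapted from the test added in this commit (struct and field names here are illustrative):

    #![warn(dead_code)] // dead_code is warn-by-default; spelled out for clarity

    #[derive(Clone, Debug)]
    struct OnlyDerivesReadIt {
        f: u32, // after this commit: warning: field is never read: `f`
    }

    struct ReadByHandImpl {
        f: u32, // no warning: the hand-written Clone impl below really reads `f`
    }

    impl Clone for ReadByHandImpl {
        fn clone(&self) -> Self {
            Self { f: self.f }
        }
    }

    fn main() {
        let _ = OnlyDerivesReadIt { f: 0 };
        let _ = ReadByHandImpl { f: 0 };
    }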

View file

@@ -16,7 +16,6 @@ macro_rules! declare_features {
     since: $ver,
     issue: to_nonzero($issue),
     edition: None,
-    description: concat!($($doc,)*),
 }
 ),+
 ];

View file

@@ -37,7 +37,6 @@ macro_rules! declare_features {
     since: $ver,
     issue: to_nonzero($issue),
     edition: $edition,
-    description: concat!($($doc,)*),
 }
 ),+];

View file

@@ -453,6 +453,9 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
     ),
     // Enumerates "identity-like" conversion methods to suggest on type mismatch.
     rustc_attr!(rustc_conversion_suggestion, Normal, template!(Word), INTERNAL_UNSTABLE),
+    // Prevents field reads in the marked trait or method to be considered
+    // during dead code analysis.
+    rustc_attr!(rustc_trivial_field_reads, Normal, template!(Word), INTERNAL_UNSTABLE),
     // ==========================================================================
     // Internal attributes, Const related:

View file

@@ -51,7 +51,6 @@ pub struct Feature {
     pub since: &'static str,
     issue: Option<NonZeroU32>,
     pub edition: Option<Edition>,
-    description: &'static str,
 }
 #[derive(Copy, Clone, Debug)]

View file

@@ -16,7 +16,6 @@ macro_rules! declare_features {
     since: $ver,
     issue: to_nonzero($issue),
     edition: None,
-    description: concat!($($doc,)*),
 }
 ),+
 ];
@@ -34,7 +33,6 @@ macro_rules! declare_features {
     since: $ver,
     issue: to_nonzero($issue),
     edition: None,
-    description: concat!($($doc,)*),
 }
 ),+
 ];

View file

@@ -62,13 +62,6 @@ fn hash_body(
     stable_hasher.finish()
 }
-/// Represents an entry and its parent `HirId`.
-#[derive(Copy, Clone, Debug)]
-pub struct Entry<'hir> {
-    parent: HirId,
-    node: Node<'hir>,
-}
 impl<'a, 'hir> NodeCollector<'a, 'hir> {
     pub(super) fn root(
         sess: &'a Session,

View file

@@ -28,7 +28,6 @@ fn compute_ignored_attr_names() -> FxHashSet<Symbol> {
 /// things (e.g., each `DefId`/`DefPath` is only hashed once).
 #[derive(Clone)]
 pub struct StableHashingContext<'a> {
-    sess: &'a Session,
     definitions: &'a Definitions,
     cstore: &'a dyn CrateStore,
     pub(super) body_resolver: BodyResolver<'a>,
@@ -78,7 +77,6 @@ impl<'a> StableHashingContext<'a> {
     !always_ignore_spans && !sess.opts.debugging_opts.incremental_ignore_spans;
     StableHashingContext {
-        sess,
         body_resolver: BodyResolver(krate),
         definitions,
         cstore,

View file

@@ -900,10 +900,7 @@ fn traverse_candidate<'pat, 'tcx: 'pat, C, T, I>(
 struct Binding<'tcx> {
     span: Span,
     source: Place<'tcx>,
-    name: Symbol,
     var_id: HirId,
-    var_ty: Ty<'tcx>,
-    mutability: Mutability,
     binding_mode: BindingMode,
 }

View file

@@ -176,17 +176,22 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         Ok(())
     }
-    PatKind::Binding { name, mutability, mode, var, ty, ref subpattern, is_primary: _ } => {
+    PatKind::Binding {
+        name: _,
+        mutability: _,
+        mode,
+        var,
+        ty: _,
+        ref subpattern,
+        is_primary: _,
+    } => {
         if let Ok(place_resolved) =
             match_pair.place.clone().try_upvars_resolved(self.tcx, self.typeck_results)
         {
             candidate.bindings.push(Binding {
-                name,
-                mutability,
                 span: match_pair.pattern.span,
                 source: place_resolved.into_place(self.tcx, self.typeck_results),
                 var_id: var,
-                var_ty: ty,
                 binding_mode: mode,
             });
         }

View file

@@ -118,9 +118,6 @@ struct Scope {
     /// the region span of this scope within source code.
     region_scope: region::Scope,
-    /// the span of that region_scope
-    region_scope_span: Span,
     /// set of places to drop when exiting this scope. This starts
     /// out empty but grows as variables are declared during the
     /// building process. This is a stack, so we always drop from the
@@ -420,7 +417,6 @@ impl<'tcx> Scopes<'tcx> {
     self.scopes.push(Scope {
         source_scope: vis_scope,
         region_scope: region_scope.0,
-        region_scope_span: region_scope.1.span,
         drops: vec![],
         moved_locals: vec![],
         cached_unwind_block: None,

View file

@@ -263,7 +263,7 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
     }
     if let Err(e) = result {
-        bug!("Error processing: {:?}: {:?}", self.mir_body.source.def_id(), e)
+        bug!("Error processing: {:?}: {:?}", self.mir_body.source.def_id(), e.message)
     };
     // Depending on current `debug_options()`, `alert_on_unused_expressions()` could panic, so

View file

@@ -239,7 +239,69 @@ impl<'tcx> MarkSymbolVisitor<'tcx> {
         }
     }
+    /// Automatically generated items marked with `rustc_trivial_field_reads`
+    /// will be ignored for the purposes of dead code analysis (see PR #85200
+    /// for discussion).
+    fn should_ignore_item(&self, def_id: DefId) -> bool {
+        if !self.tcx.has_attr(def_id, sym::automatically_derived)
+            && !self
+                .tcx
+                .impl_of_method(def_id)
+                .map_or(false, |impl_id| self.tcx.has_attr(impl_id, sym::automatically_derived))
+        {
+            return false;
+        }
+        let has_attr = |def_id| self.tcx.has_attr(def_id, sym::rustc_trivial_field_reads);
+        if has_attr(def_id) {
+            return true;
+        }
+        if let Some(impl_of) = self.tcx.impl_of_method(def_id) {
+            if has_attr(impl_of) {
+                return true;
+            }
+            if let Some(trait_of) = self.tcx.trait_id_of_impl(impl_of) {
+                if has_attr(trait_of) {
+                    return true;
+                }
+                if let Some(method_ident) = self.tcx.opt_item_name(def_id) {
+                    if let Some(trait_method) = self
+                        .tcx
+                        .associated_items(trait_of)
+                        .find_by_name_and_kind(self.tcx, method_ident, ty::AssocKind::Fn, trait_of)
+                    {
+                        if has_attr(trait_method.def_id) {
+                            return true;
+                        }
+                    }
+                }
+            }
+        } else if let Some(trait_of) = self.tcx.trait_of_item(def_id) {
+            if has_attr(trait_of) {
+                return true;
+            }
+        }
+        return false;
+    }
     fn visit_node(&mut self, node: Node<'tcx>) {
+        if let Some(item_def_id) = match node {
+            Node::Item(hir::Item { def_id, .. })
+            | Node::ForeignItem(hir::ForeignItem { def_id, .. })
+            | Node::TraitItem(hir::TraitItem { def_id, .. })
+            | Node::ImplItem(hir::ImplItem { def_id, .. }) => Some(def_id.to_def_id()),
+            _ => None,
+        } {
+            if self.should_ignore_item(item_def_id) {
+                return;
+            }
+        }
         let had_repr_c = self.repr_has_repr_c;
         let had_inherited_pub_visibility = self.inherited_pub_visibility;
         let had_pub_visibility = self.pub_visibility;
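For orientation, `should_ignore_item` walks from a method to its enclosing impl and then to the trait that impl implements, looking for `automatically_derived` and `rustc_trivial_field_reads`. Below is a hand-written, simplified sketch of roughly what `#[derive(Clone)]` expands to, to show where those attributes end up; it is not the compiler's actual expansion.

    struct S {
        f: u32,
    }

    // `#[derive(Clone)]` generates an impl tagged `#[automatically_derived]`;
    // that marker is the first thing `should_ignore_item` checks for, either
    // on the item itself or on the impl enclosing the method being visited.
    #[automatically_derived]
    impl Clone for S {
        fn clone(&self) -> S {
            // This read of `self.f` is what used to keep the field "live".
            S { f: Clone::clone(&self.f) }
        }
    }

    // The lookup then climbs method -> enclosing impl -> implemented trait.
    // `Clone` itself gains `rustc_trivial_field_reads` elsewhere in this
    // commit, so a read like the one above no longer counts for dead-code
    // analysis.
    fn main() {
        let _ = S { f: 0 };
    }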

View file

@@ -1134,6 +1134,7 @@ symbols! {
     rustc_synthetic,
     rustc_test_marker,
     rustc_then_this_would_need,
+    rustc_trivial_field_reads,
     rustc_unsafe_specialization_marker,
     rustc_variance,
     rustdoc,

View file

@@ -105,6 +105,7 @@
 #[stable(feature = "rust1", since = "1.0.0")]
 #[lang = "clone"]
 #[rustc_diagnostic_item = "Clone"]
+#[cfg_attr(not(bootstrap), rustc_trivial_field_reads)]
 pub trait Clone: Sized {
     /// Returns a copy of the value.
     ///

View file

@@ -582,6 +582,7 @@ impl Display for Arguments<'_> {
 )]
 #[doc(alias = "{:?}")]
 #[rustc_diagnostic_item = "debug_trait"]
+#[cfg_attr(not(bootstrap), rustc_trivial_field_reads)]
 pub trait Debug {
     /// Formats the value using the given formatter.
     ///

View file

@@ -653,6 +653,7 @@ mod debug_list {
 fn test_formatting_parameters_are_forwarded() {
     use std::collections::{BTreeMap, BTreeSet};
     #[derive(Debug)]
+    #[allow(dead_code)]
     struct Foo {
         bar: u32,
         baz: u32,

View file

@@ -468,9 +468,6 @@ struct ProgrammableSink {
     // Writes append to this slice
     pub buffer: Vec<u8>,
-    // Flush sets this flag
-    pub flushed: bool,
     // If true, writes will always be an error
     pub always_write_error: bool,
@@ -520,7 +517,6 @@ impl Write for ProgrammableSink {
     if self.always_flush_error {
         Err(io::Error::new(io::ErrorKind::Other, "test - always_flush_error"))
     } else {
-        self.flushed = true;
         Ok(())
     }
 }

View file

@@ -16,6 +16,7 @@ use parser::compiled::{msys_terminfo, parse};
 use searcher::get_dbpath_for_term;
 /// A parsed terminfo database entry.
+#[allow(unused)]
 #[derive(Debug)]
 pub(crate) struct TermInfo {
     /// Names for the terminal

View file

@@ -557,7 +557,6 @@ fn build_macro(
     name: Symbol,
     import_def_id: Option<DefId>,
 ) -> clean::ItemKind {
-    let imported_from = cx.tcx.crate_name(def_id.krate);
     match CStore::from_tcx(cx.tcx).load_macro_untracked(def_id, cx.sess()) {
         LoadedMacro::MacroDef(item_def, _) => {
             if let ast::ItemKind::MacroDef(ref def) = item_def.kind {
@@ -569,7 +568,6 @@ fn build_macro(
                 def_id,
                 cx.tcx.visibility(import_def_id.unwrap_or(def_id)),
             ),
-            imported_from: Some(imported_from),
         })
     } else {
         unreachable!()

View file

@@ -1859,7 +1859,6 @@ impl Clean<Vec<Item>> for (&hir::Item<'_>, Option<Symbol>) {
     }
     ItemKind::Macro(ref macro_def) => MacroItem(Macro {
         source: display_macro_source(cx, name, &macro_def, def_id, &item.vis),
-        imported_from: None,
     }),
     ItemKind::Trait(is_auto, unsafety, ref generics, ref bounds, ref item_ids) => {
         let items = item_ids

View file

@@ -2202,7 +2202,6 @@ crate struct ImportSource {
 #[derive(Clone, Debug)]
 crate struct Macro {
     crate source: String,
-    crate imported_from: Option<Symbol>,
 }
 #[derive(Clone, Debug)]

View file

@@ -224,7 +224,6 @@ struct AllTypes {
     opaque_tys: FxHashSet<ItemEntry>,
     statics: FxHashSet<ItemEntry>,
     constants: FxHashSet<ItemEntry>,
-    keywords: FxHashSet<ItemEntry>,
     attributes: FxHashSet<ItemEntry>,
     derives: FxHashSet<ItemEntry>,
     trait_aliases: FxHashSet<ItemEntry>,
@@ -245,7 +244,6 @@ impl AllTypes {
     opaque_tys: new_set(100),
     statics: new_set(100),
     constants: new_set(100),
-    keywords: new_set(100),
     attributes: new_set(100),
     derives: new_set(100),
     trait_aliases: new_set(100),

View file

@@ -2,6 +2,7 @@
 // Test binary_search_by_key lifetime. Issue #34683
+#[allow(dead_code)]
 #[derive(Debug)]
 struct Assignment {
     topic: String,

View file

@@ -1,4 +1,5 @@
 // run-pass
+#![allow(dead_code)]
 #![deny(unused_mut)]
 #[derive(Debug)]

View file

@@ -3,6 +3,7 @@
 // check-pass
 #![allow(unreachable_code)]
 #![warn(unused)]
+#![allow(dead_code)]
 #[derive(Debug)]
 struct Point {

View file

@@ -1,5 +1,5 @@
 warning: value captured by `a` is never read
-  --> $DIR/liveness.rs:23:9
+  --> $DIR/liveness.rs:24:9
   |
 LL | a = 1;
   | ^
@@ -13,7 +13,7 @@ LL | #![warn(unused)]
   = help: did you mean to capture by reference instead?
 warning: unused variable: `a`
-  --> $DIR/liveness.rs:32:9
+  --> $DIR/liveness.rs:33:9
   |
 LL | a += 1;
   | ^
@@ -27,7 +27,7 @@ LL | #![warn(unused)]
   = help: did you mean to capture by reference instead?
 warning: value assigned to `a` is never read
-  --> $DIR/liveness.rs:52:9
+  --> $DIR/liveness.rs:53:9
   |
 LL | a += 1;
   | ^
@@ -35,7 +35,7 @@ LL | a += 1;
   = help: maybe it is overwritten before being read?
 warning: value assigned to `a` is never read
-  --> $DIR/liveness.rs:76:13
+  --> $DIR/liveness.rs:77:13
   |
 LL | a = Some("d1");
   | ^
@@ -43,7 +43,7 @@ LL | a = Some("d1");
   = help: maybe it is overwritten before being read?
 warning: value assigned to `b` is never read
-  --> $DIR/liveness.rs:84:13
+  --> $DIR/liveness.rs:85:13
   |
 LL | b = Some("e1");
   | ^
@@ -51,7 +51,7 @@ LL | b = Some("e1");
   = help: maybe it is overwritten before being read?
 warning: value assigned to `b` is never read
-  --> $DIR/liveness.rs:86:13
+  --> $DIR/liveness.rs:87:13
   |
 LL | b = Some("e2");
   | ^
@@ -59,7 +59,7 @@ LL | b = Some("e2");
   = help: maybe it is overwritten before being read?
 warning: unused variable: `b`
-  --> $DIR/liveness.rs:84:13
+  --> $DIR/liveness.rs:85:13
   |
 LL | b = Some("e1");
   | ^

View file

@@ -2,6 +2,7 @@
 // check-pass
 #![warn(unused)]
+#![allow(dead_code)]
 #[derive(Debug)]
 struct MyStruct {

View file

@@ -1,5 +1,5 @@
 warning: value assigned to `a` is never read
-  --> $DIR/liveness_unintentional_copy.rs:19:9
+  --> $DIR/liveness_unintentional_copy.rs:20:9
   |
 LL | a = s;
   | ^
@@ -13,7 +13,7 @@ LL | #![warn(unused)]
   = help: maybe it is overwritten before being read?
 warning: unused variable: `a`
-  --> $DIR/liveness_unintentional_copy.rs:19:9
+  --> $DIR/liveness_unintentional_copy.rs:20:9
   |
 LL | a = s;
   | ^
@@ -27,7 +27,7 @@ LL | #![warn(unused)]
   = help: did you mean to capture by reference instead?
 warning: unused variable: `a`
-  --> $DIR/liveness_unintentional_copy.rs:35:9
+  --> $DIR/liveness_unintentional_copy.rs:36:9
   |
 LL | a += x;
   | ^

View file

@@ -1,6 +1,7 @@
 // edition:2021
 //check-pass
 #![warn(unused)]
+#![allow(dead_code)]
 #![feature(rustc_attrs)]
 #[derive(Debug, Clone, Copy)]

View file

@@ -3,6 +3,8 @@
 // Test that we can use raw ptrs when using `capture_disjoint_fields`.
+#![allow(dead_code)]
 #[derive(Debug)]
 struct S {
     s: String,

View file

@@ -1,4 +1,7 @@
 // run-pass
+#![allow(dead_code)]
 use std::fmt::Debug;
 #[derive(Debug)]

View file

@@ -0,0 +1,45 @@
+// Checks that derived implementations of Clone and Debug do not
+// contribute to dead code analysis (issue #84647).
+
+#![forbid(dead_code)]
+
+struct A { f: () }
+//~^ ERROR: field is never read: `f`
+
+#[derive(Clone)]
+struct B { f: () }
+//~^ ERROR: field is never read: `f`
+
+#[derive(Debug)]
+struct C { f: () }
+//~^ ERROR: field is never read: `f`
+
+#[derive(Debug,Clone)]
+struct D { f: () }
+//~^ ERROR: field is never read: `f`
+
+struct E { f: () }
+//~^ ERROR: field is never read: `f`
+// Custom impl, still doesn't read f
+impl Clone for E {
+    fn clone(&self) -> Self {
+        Self { f: () }
+    }
+}
+
+struct F { f: () }
+// Custom impl that actually reads f
+impl Clone for F {
+    fn clone(&self) -> Self {
+        Self { f: self.f }
+    }
+}
+
+fn main() {
+    let _ = A { f: () };
+    let _ = B { f: () };
+    let _ = C { f: () };
+    let _ = D { f: () };
+    let _ = E { f: () };
+    let _ = F { f: () };
+}

View file

@@ -0,0 +1,38 @@
+error: field is never read: `f`
+  --> $DIR/clone-debug-dead-code.rs:6:12
+   |
+LL | struct A { f: () }
+   |            ^^^^^
+   |
+note: the lint level is defined here
+  --> $DIR/clone-debug-dead-code.rs:4:11
+   |
+LL | #![forbid(dead_code)]
+   |           ^^^^^^^^^
+
+error: field is never read: `f`
+  --> $DIR/clone-debug-dead-code.rs:10:12
+   |
+LL | struct B { f: () }
+   |            ^^^^^
+
+error: field is never read: `f`
+  --> $DIR/clone-debug-dead-code.rs:14:12
+   |
+LL | struct C { f: () }
+   |            ^^^^^
+
+error: field is never read: `f`
+  --> $DIR/clone-debug-dead-code.rs:18:12
+   |
+LL | struct D { f: () }
+   |            ^^^^^
+
+error: field is never read: `f`
+  --> $DIR/clone-debug-dead-code.rs:21:12
+   |
+LL | struct E { f: () }
+   |            ^^^^^
+
+error: aborting due to 5 previous errors

View file

@@ -1,6 +1,8 @@
 // run-pass
 // pretty-expanded FIXME #23616
+#![allow(dead_code)]
 #[derive(Clone)]
 struct S<T> {
     foo: (),

View file

@@ -1,6 +1,8 @@
 // run-pass
 // pretty-expanded FIXME #23616
+#![allow(dead_code)]
 #[derive(Clone)]
 struct S {
     _int: isize,

View file

@@ -1,6 +1,8 @@
 // run-pass
 // pretty-expanded FIXME #23616
+#![allow(dead_code)]
 #[derive(Clone)]
 struct S((), ());

View file

@@ -1,4 +1,7 @@
 // run-pass
+#![allow(dead_code)]
 pub fn main() {
     #[derive(Debug)]
     struct Foo {

View file

@@ -1,4 +1,7 @@
 // run-pass
+#![allow(dead_code)]
 trait Trait { fn dummy(&self) { } }
 #[derive(Debug)]

View file

@@ -1,5 +1,6 @@
 // run-pass
 #![feature(box_syntax)]
+#![allow(dead_code)]
 trait T {
     fn print(&self);

View file

@@ -1,6 +1,7 @@
 // run-pass
 #![allow(non_upper_case_globals)]
+#![allow(dead_code)]
 /*!
  * On x86_64-linux-gnu and possibly other platforms, structs get 8-byte "preferred" alignment,
  * but their "ABI" alignment (i.e., what actually matters for data layout) is the largest alignment

View file

@@ -1,5 +1,6 @@
 // run-pass
 #![allow(unused_mut)]
+#![allow(dead_code)]
 #![feature(box_syntax)]
 #[derive(Clone)]

View file

@@ -1,5 +1,6 @@
 // run-pass
 #![allow(unused_mut)]
+#![allow(dead_code)]
 #![feature(box_syntax)]
 #[derive(Clone)]

View file

@@ -1,5 +1,7 @@
 // run-pass
+#![allow(dead_code)]
 use std::rc::Rc;
 use std::ops::Deref;

View file

@@ -1,5 +1,6 @@
 // run-pass
 #![feature(box_syntax)]
+#![allow(dead_code)]
 #[derive(Copy, Clone)]
 struct Foo {

View file

@@ -1,5 +1,6 @@
 // run-pass
 #![allow(non_camel_case_types)]
+#![allow(dead_code)]
 trait noisy {
     fn speak(&mut self) -> isize;

View file

@@ -1,5 +1,6 @@
 // run-pass
 #![allow(non_camel_case_types)]
+#![allow(dead_code)]
 trait noisy {
     fn speak(&mut self);

View file

@@ -1,4 +1,7 @@
 // run-pass
+#![allow(dead_code)]
 #[derive(Debug)]
 struct Foo {
     x: isize,

View file

@@ -1,5 +1,6 @@
 // run-pass
 #![allow(non_camel_case_types)]
+#![allow(dead_code)]
 #[derive(Debug)]
 enum a_tag {

View file

@@ -29,36 +29,21 @@ declare_clippy_lint! {
     "#[macro_use] is no longer needed"
 }
-const BRACKETS: &[char] = &['<', '>'];
 #[derive(Clone, Debug, PartialEq, Eq)]
 struct PathAndSpan {
     path: String,
     span: Span,
 }
-/// `MacroRefData` includes the name of the macro
-/// and the path from `SourceMap::span_to_filename`.
+/// `MacroRefData` includes the name of the macro.
 #[derive(Debug, Clone)]
 pub struct MacroRefData {
     name: String,
-    path: String,
 }
 impl MacroRefData {
-    pub fn new(name: String, callee: Span, cx: &LateContext<'_>) -> Self {
-        let sm = cx.sess().source_map();
-        let mut path = sm.filename_for_diagnostics(&sm.span_to_filename(callee)).to_string();
-        // std lib paths are <::std::module::file type>
-        // so remove brackets, space and type.
-        if path.contains('<') {
-            path = path.replace(BRACKETS, "");
-        }
-        if path.contains(' ') {
-            path = path.split(' ').next().unwrap().to_string();
-        }
-        Self { name, path }
+    pub fn new(name: String) -> Self {
+        Self { name }
     }
 }
@@ -78,7 +63,7 @@ impl MacroUseImports {
     fn push_unique_macro(&mut self, cx: &LateContext<'_>, span: Span) {
         let call_site = span.source_callsite();
         let name = snippet(cx, cx.sess().source_map().span_until_char(call_site, '!'), "_");
-        if let Some(callee) = span.source_callee() {
+        if let Some(_callee) = span.source_callee() {
             if !self.collected.contains(&call_site) {
                 let name = if name.contains("::") {
                     name.split("::").last().unwrap().to_string()
@@ -86,7 +71,7 @@ impl MacroUseImports {
                 name.to_string()
             };
-            self.mac_refs.push(MacroRefData::new(name, callee.def_site, cx));
+            self.mac_refs.push(MacroRefData::new(name));
             self.collected.insert(call_site);
         }
     }
@@ -95,10 +80,10 @@ impl MacroUseImports {
     fn push_unique_macro_pat_ty(&mut self, cx: &LateContext<'_>, span: Span) {
         let call_site = span.source_callsite();
         let name = snippet(cx, cx.sess().source_map().span_until_char(call_site, '!'), "_");
-        if let Some(callee) = span.source_callee() {
+        if let Some(_callee) = span.source_callee() {
             if !self.collected.contains(&call_site) {
                 self.mac_refs
-                    .push(MacroRefData::new(name.to_string(), callee.def_site, cx));
+                    .push(MacroRefData::new(name.to_string()));
                 self.collected.insert(call_site);
             }
         }

View file

@@ -3,8 +3,7 @@ use clippy_utils::diagnostics::{span_lint, span_lint_and_help};
 use clippy_utils::{match_def_path, paths};
 use if_chain::if_chain;
 use rustc_ast::ast::{LitKind, StrStyle};
-use rustc_data_structures::fx::FxHashSet;
-use rustc_hir::{BorrowKind, Expr, ExprKind, HirId};
+use rustc_hir::{BorrowKind, Expr, ExprKind};
 use rustc_lint::{LateContext, LateLintPass};
 use rustc_session::{declare_tool_lint, impl_lint_pass};
 use rustc_span::source_map::{BytePos, Span};
@@ -53,10 +52,7 @@ declare_clippy_lint! {
 }
 #[derive(Clone, Default)]
-pub struct Regex {
-    spans: FxHashSet<Span>,
-    last: Option<HirId>,
-}
+pub struct Regex {}
 impl_lint_pass!(Regex => [INVALID_REGEX, TRIVIAL_REGEX]);

View file

@@ -1,6 +1,6 @@
 // run-rustfix
-#![allow(unused_imports)]
+#![allow(unused_imports,dead_code)]
 #![deny(clippy::default_trait_access)]
 use std::default;

View file

@@ -1,6 +1,6 @@
 // run-rustfix
-#![allow(unused_imports)]
+#![allow(unused_imports,dead_code)]
 #![deny(clippy::default_trait_access)]
 use std::default;

View file

@@ -762,7 +762,6 @@ impl MacroArgKind {
 #[derive(Debug, Clone)]
 struct ParsedMacroArg {
     kind: MacroArgKind,
-    span: Span,
 }
 impl ParsedMacroArg {
@@ -780,14 +779,10 @@ impl ParsedMacroArg {
 struct MacroArgParser {
     /// Either a name of the next metavariable, a separator, or junk.
     buf: String,
-    /// The start position on the current buffer.
-    lo: BytePos,
     /// The first token of the current buffer.
     start_tok: Token,
     /// `true` if we are parsing a metavariable or a repeat.
     is_meta_var: bool,
-    /// The position of the last token.
-    hi: BytePos,
     /// The last token parsed.
     last_tok: Token,
     /// Holds the parsed arguments.
@@ -807,8 +802,6 @@ fn last_tok(tt: &TokenTree) -> Token {
 impl MacroArgParser {
     fn new() -> MacroArgParser {
         MacroArgParser {
-            lo: BytePos(0),
-            hi: BytePos(0),
             buf: String::new(),
             is_meta_var: false,
             last_tok: Token {
@@ -824,7 +817,6 @@ impl MacroArgParser {
     }
     fn set_last_tok(&mut self, tok: &TokenTree) {
-        self.hi = tok.span().hi();
         self.last_tok = last_tok(tok);
     }
@@ -836,7 +828,6 @@ impl MacroArgParser {
         };
         self.result.push(ParsedMacroArg {
             kind: MacroArgKind::Separator(self.buf.clone(), prefix),
-            span: mk_sp(self.lo, self.hi),
         });
         self.buf.clear();
     }
@@ -849,7 +840,6 @@ impl MacroArgParser {
         };
         self.result.push(ParsedMacroArg {
             kind: MacroArgKind::Other(self.buf.clone(), prefix),
-            span: mk_sp(self.lo, self.hi),
         });
         self.buf.clear();
     }
@@ -858,11 +848,10 @@ impl MacroArgParser {
         match iter.next() {
             Some(TokenTree::Token(Token {
                 kind: TokenKind::Ident(name, _),
-                span,
+                ..
             })) => {
                 self.result.push(ParsedMacroArg {
                     kind: MacroArgKind::MetaVariable(name, self.buf.clone()),
-                    span: mk_sp(self.lo, span.hi()),
                 });
                 self.buf.clear();
@@ -873,10 +862,9 @@ impl MacroArgParser {
         }
     }
-    fn add_delimited(&mut self, inner: Vec<ParsedMacroArg>, delim: DelimToken, span: Span) {
+    fn add_delimited(&mut self, inner: Vec<ParsedMacroArg>, delim: DelimToken) {
         self.result.push(ParsedMacroArg {
             kind: MacroArgKind::Delimited(delim, inner),
-            span,
         });
     }
@@ -886,19 +874,15 @@ impl MacroArgParser {
         inner: Vec<ParsedMacroArg>,
         delim: DelimToken,
         iter: &mut Cursor,
-        span: Span,
     ) -> Option<()> {
         let mut buffer = String::new();
         let mut first = true;
-        let mut lo = span.lo();
-        let mut hi = span.hi();
         // Parse '*', '+' or '?.
         for tok in iter {
             self.set_last_tok(&tok);
             if first {
                 first = false;
-                lo = tok.span().lo();
             }
             match tok {
@@ -918,7 +902,6 @@ impl MacroArgParser {
                 }
                 TokenTree::Token(ref t) => {
                     buffer.push_str(&pprust::token_to_string(&t));
-                    hi = t.span.hi();
                 }
                 _ => return None,
             }
@@ -930,20 +913,17 @@ impl MacroArgParser {
         } else {
             Some(Box::new(ParsedMacroArg {
                 kind: MacroArgKind::Other(buffer, "".to_owned()),
-                span: mk_sp(lo, hi),
             }))
         };
         self.result.push(ParsedMacroArg {
             kind: MacroArgKind::Repeat(delim, inner, another, self.last_tok.clone()),
-            span: mk_sp(self.lo, self.hi),
         });
         Some(())
     }
     fn update_buffer(&mut self, t: &Token) {
         if self.buf.is_empty() {
-            self.lo = t.span.lo();
             self.start_tok = t.clone();
         } else {
             let needs_space = match next_space(&self.last_tok.kind) {
@@ -999,7 +979,6 @@ impl MacroArgParser {
             // Start keeping the name of this metavariable in the buffer.
             self.is_meta_var = true;
-            self.lo = span.lo();
             self.start_tok = Token {
                 kind: TokenKind::Dollar,
                 span,
@@ -1012,7 +991,7 @@ impl MacroArgParser {
                 self.add_meta_variable(&mut iter)?;
             }
             TokenTree::Token(ref t) => self.update_buffer(t),
-            TokenTree::Delimited(delimited_span, delimited, ref tts) => {
+            TokenTree::Delimited(_delimited_span, delimited, ref tts) => {
                if !self.buf.is_empty() {
                    if next_space(&self.last_tok.kind) == SpaceState::Always {
                        self.add_separator();
@@ -1022,16 +1001,14 @@ impl MacroArgParser {
                }
                // Parse the stuff inside delimiters.
-               let mut parser = MacroArgParser::new();
-               parser.lo = delimited_span.open.lo();
+               let parser = MacroArgParser::new();
                let delimited_arg = parser.parse(tts.clone())?;
-               let span = delimited_span.entire();
                if self.is_meta_var {
-                   self.add_repeat(delimited_arg, delimited, &mut iter, span)?;
+                   self.add_repeat(delimited_arg, delimited, &mut iter)?;
                    self.is_meta_var = false;
                } else {
-                   self.add_delimited(delimited_arg, delimited, span);
+                   self.add_delimited(delimited_arg, delimited);
                }
            }
        }
} }

View file

@@ -27,7 +27,6 @@ type FileModMap<'ast> = BTreeMap<FileName, Module<'ast>>;
 pub(crate) struct Module<'a> {
     ast_mod_kind: Option<Cow<'a, ast::ModKind>>,
     pub(crate) items: Cow<'a, Vec<rustc_ast::ptr::P<ast::Item>>>,
-    attrs: Cow<'a, Vec<ast::Attribute>>,
     inner_attr: Vec<ast::Attribute>,
     pub(crate) span: Span,
 }
@@ -46,7 +45,6 @@ impl<'a> Module<'a> {
         .collect();
     Module {
         items: mod_items,
-        attrs: mod_attrs,
         inner_attr,
         span: mod_span,
         ast_mod_kind,