Allow unused tuple struct fields in Miri tests
Otherwise the newly detected dead code causes new warnings in the test output.
parent 93f553049b
commit 10f4023192

21 changed files with 23 additions and 23 deletions
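For context, a minimal sketch of the lint behavior this commit works around (the struct names below are illustrative and not taken from the test suite): rustc's `dead_code` lint now also flags tuple struct fields that are constructed but never read, and the warning can be silenced per field with `#[allow(dead_code)]`, which is the pattern applied throughout this diff.

// Assumed example, not part of the commit: these structs only illustrate
// the field-level `#[allow(dead_code)]` attribute used below.
struct Unsuppressed(u32); // lint fires: the field is never read
struct Suppressed(#[allow(dead_code)] u32); // lint silenced for this field only

fn main() {
    // Constructing the values does not count as a "use" of the fields;
    // they are never read, just like the layout- and retagging-oriented
    // structs in these Miri tests.
    let _a = Unsuppressed(1);
    let _b = Suppressed(2);
}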
@@ -3,7 +3,7 @@
//@[stack]error-in-other-file: which is strongly protected
//@[tree]error-in-other-file: /deallocation through .* is forbidden/
-struct Newtype<'a>(&'a mut i32, i32);
+struct Newtype<'a>(#[allow(dead_code)] &'a mut i32, #[allow(dead_code)] i32);

fn dealloc_while_running(_n: Newtype<'_>, dealloc: impl FnOnce()) {
    dealloc();
@@ -4,7 +4,7 @@
//@[stack]error-in-other-file: which is strongly protected
//@[tree]error-in-other-file: /deallocation through .* is forbidden/

-struct Newtype<'a>(&'a mut i32);
+struct Newtype<'a>(#[allow(dead_code)] &'a mut i32);

fn dealloc_while_running(_n: Newtype<'_>, dealloc: impl FnOnce()) {
    dealloc();
@@ -6,7 +6,7 @@
use std::sync::atomic::{AtomicI64, Ordering};

#[repr(align(8))]
-struct AlignedI64(i64);
+struct AlignedI64(#[allow(dead_code)] i64);

fn main() {
    static X: AlignedI64 = AlignedI64(0);
@@ -1,7 +1,7 @@
// should find the bug even without retagging
//@compile-flags: -Zmiri-disable-stacked-borrows

-struct SliceWithHead(u8, [u8]);
+struct SliceWithHead(#[allow(dead_code)] u8, #[allow(dead_code)] [u8]);

fn main() {
    let buf = [0u32; 1];
@@ -3,7 +3,7 @@
// Some targets treat arrays and structs very differently. We would probably catch that on those
// targets since we check the `PassMode`; here we ensure that we catch it on *all* targets
// (in particular, on x86-64 the pass mode is `Indirect` for both of these).
-struct S(i32, i32, i32, i32);
+struct S(#[allow(dead_code)] i32, #[allow(dead_code)] i32, #[allow(dead_code)] i32, #[allow(dead_code)] i32);
type A = [i32; 4];

fn main() {
@@ -1,7 +1,7 @@
trait Empty {}

#[repr(transparent)]
-pub struct FunnyPointer(dyn Empty);
+pub struct FunnyPointer(#[allow(dead_code)] dyn Empty);

#[repr(C)]
pub struct Meta {
@@ -1,7 +1,7 @@
//@compile-flags: -Cdebug-assertions=no

#[repr(transparent)]
-struct HasDrop(u8);
+struct HasDrop(#[allow(dead_code)] u8);

impl Drop for HasDrop {
    fn drop(&mut self) {}
@@ -7,7 +7,7 @@ mod utils;

#[repr(align(8))]
#[derive(Copy, Clone)]
-struct Align8(u64);
+struct Align8(#[allow(dead_code)] u64);

fn main() {
    let buffer = [0u32; 128]; // get some 4-aligned memory
@@ -35,7 +35,7 @@ fn main() {
    if cfg!(read_unaligned_ptr) {
        #[repr(align(16))]
        #[derive(Copy, Clone)]
-        struct Align16(u128);
+        struct Align16(#[allow(dead_code)] u128);

        let align16 = if align8.addr() % 16 == 0 { align8 } else { align8.wrapping_add(2) };
        assert!(align16.addr() % 16 == 0);
@@ -46,7 +46,7 @@ fn test_align_to() {
    {
        #[repr(align(8))]
        #[derive(Copy, Clone)]
-        struct Align8(u64);
+        struct Align8(#[allow(dead_code)] u64);

        let (_l, m, _r) = unsafe { s.align_to::<Align8>() };
        assert!(m.len() > 0);
@@ -97,7 +97,7 @@ fn huge_align() {
    const SIZE: usize = 1 << 30;
    #[cfg(target_pointer_width = "16")]
    const SIZE: usize = 1 << 13;
-    struct HugeSize([u8; SIZE - 1]);
+    struct HugeSize(#[allow(dead_code)] [u8; SIZE - 1]);
    let _ = std::ptr::invalid::<HugeSize>(SIZE).align_offset(SIZE);
}
@@ -57,7 +57,7 @@ fn test1() {
}

// Make the allocator itself so big that the Box is not even a ScalarPair any more.
-struct OnceAllocRef<'s, 'a>(&'s OnceAlloc<'a>, u64);
+struct OnceAllocRef<'s, 'a>(&'s OnceAlloc<'a>, #[allow(dead_code)] u64);

unsafe impl<'shared, 'a: 'shared> Allocator for OnceAllocRef<'shared, 'a> {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
@@ -46,7 +46,7 @@ fn boxed_pair_to_vec() {
}

#[derive(Debug)]
-struct Foo(u64);
+struct Foo(#[allow(dead_code)] u64);
fn reinterstruct(box_pair: Box<PairFoo>) -> Vec<Foo> {
    let ref_pair = Box::leak(box_pair) as *mut PairFoo;
    let ptr_foo = unsafe { std::ptr::addr_of_mut!((*ref_pair).fst) };
@@ -1,6 +1,6 @@
// test that ordinary fat pointer operations work.

-struct Wrapper<T: ?Sized>(u32, T);
+struct Wrapper<T: ?Sized>(#[allow(dead_code)] u32, T);

struct FatPtrContainer<'a> {
    ptr: &'a [u8],
@@ -2,7 +2,7 @@
use std::mem;

#[repr(packed(4))]
-struct Slice([u32]);
+struct Slice(#[allow(dead_code)] [u32]);

#[repr(packed(2), C)]
struct PackedSized {
@@ -1,6 +1,6 @@
#[repr(u8)]
enum Foo {
-    Foo(u8),
+    Foo(#[allow(dead_code)] u8),
}

fn main() {
@@ -4,7 +4,7 @@
use std::mem;

const SZ: usize = 100;
-struct P<T: ?Sized>([u8; SZ], T);
+struct P<T: ?Sized>(#[allow(dead_code)] [u8; SZ], T);

type Ack<T> = P<P<T>>;
@@ -102,7 +102,7 @@ fn test_inner_packed() {
    struct Inner(u32);

    #[derive(Clone, Copy)]
-    struct Outer(u8, Inner);
+    struct Outer(#[allow(dead_code)] u8, Inner);

    let o = Outer(0, Inner(42));
    let _x = o.1;
@@ -1,6 +1,6 @@
//@compile-flags: -Zmiri-retag-fields=none

-struct Newtype<'a>(&'a mut i32);
+struct Newtype<'a>(#[allow(dead_code)] &'a mut i32);

fn dealloc_while_running(_n: Newtype<'_>, dealloc: impl FnOnce()) {
    dealloc();
@@ -1,6 +1,6 @@
//@compile-flags: -Zmiri-retag-fields=scalar

-struct Newtype<'a>(&'a mut i32, i32, i32);
+struct Newtype<'a>(#[allow(dead_code)] &'a mut i32, #[allow(dead_code)] i32, #[allow(dead_code)] i32);

fn dealloc_while_running(_n: Newtype<'_>, dealloc: impl FnOnce()) {
    dealloc();
@@ -226,7 +226,7 @@ fn not_unpin_not_protected() {
    // the self-referential-coroutine situation, it does not seem worth the potential trouble.)
    use std::marker::PhantomPinned;

-    pub struct NotUnpin(i32, PhantomPinned);
+    pub struct NotUnpin(#[allow(dead_code)] i32, PhantomPinned);

    fn inner(x: &mut NotUnpin, f: fn(&mut NotUnpin)) {
        // `f` may mutate, but it may not deallocate!
@@ -318,7 +318,7 @@ fn not_unpin_not_protected() {
    // the self-referential-coroutine situation, it does not seem worth the potential trouble.)
    use std::marker::PhantomPinned;

-    pub struct NotUnpin(i32, PhantomPinned);
+    pub struct NotUnpin(#[allow(dead_code)] i32, PhantomPinned);

    fn inner(x: &mut NotUnpin, f: fn(&mut NotUnpin)) {
        // `f` may mutate, but it may not deallocate!
@@ -10,7 +10,7 @@ impl Drop for Foo {
static mut FOO: bool = false;

enum Bar {
-    A(Box<i32>),
+    A(#[allow(dead_code)] Box<i32>),
    B(Foo),
}