Auto merge of #120486 - reitermarkus:use-generic-nonzero, r=dtolnay

Use generic `NonZero` internally.

Tracking issue: https://github.com/rust-lang/rust/issues/120257
bors committed 2024-02-16 07:46:31 +00:00 · commit 1be468815c
144 changed files with 636 additions and 628 deletions

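The change is mechanical: every use of the width-specific aliases (`NonZeroU8`, `NonZeroU32`, `NonZeroU64`, `NonZeroUsize`) becomes an instantiation of the single generic `std::num::NonZero<T>`, gated in-tree behind the `generic_nonzero` feature added in the first hunk below (the generic type has since been stabilized). A minimal standalone sketch of the new spelling, outside the compiler:

```rust
use std::num::NonZero;

fn main() {
    // One generic type instead of per-width aliases such as NonZeroU32.
    let issue: Option<NonZero<u32>> = NonZero::new(120257);
    let alloc_id: NonZero<u64> = NonZero::new(1).unwrap();

    // The niche optimization is unchanged: Option<NonZero<u32>> is still
    // exactly as large as a bare u32.
    assert_eq!(
        std::mem::size_of::<Option<NonZero<u32>>>(),
        std::mem::size_of::<u32>()
    );

    println!("{:?} {}", issue, alloc_id);
}
```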

@@ -34,6 +34,7 @@
 #![feature(discriminant_kind)]
 #![feature(exhaustive_patterns)]
 #![feature(coroutines)]
+#![feature(generic_nonzero)]
 #![feature(if_let_guard)]
 #![feature(inline_const)]
 #![feature(iter_from_coroutine)]

@@ -21,7 +21,7 @@ use rustc_session::parse::feature_err_issue;
 use rustc_session::Session;
 use rustc_span::symbol::{sym, Symbol};
 use rustc_span::Span;
-use std::num::NonZeroU32;
+use std::num::NonZero;
 #[derive(PartialEq, Clone, Copy, Debug)]
 pub enum StabilityLevel {
@@ -102,7 +102,7 @@ pub fn report_unstable(
 sess: &Session,
 feature: Symbol,
 reason: Option<Symbol>,
-issue: Option<NonZeroU32>,
+issue: Option<NonZero<u32>>,
 suggestion: Option<(Span, String, String, Applicability)>,
 is_soft: bool,
 span: Span,
@@ -235,7 +235,7 @@ pub enum EvalResult {
 Deny {
 feature: Symbol,
 reason: Option<Symbol>,
-issue: Option<NonZeroU32>,
+issue: Option<NonZero<u32>>,
 suggestion: Option<(Span, String, String, Applicability)>,
 is_soft: bool,
 },
@@ -433,7 +433,7 @@ impl<'tcx> TyCtxt<'tcx> {
 // the `-Z force-unstable-if-unmarked` flag present (we're
 // compiling a compiler crate), then let this missing feature
 // annotation slide.
-if feature == sym::rustc_private && issue == NonZeroU32::new(27812) {
+if feature == sym::rustc_private && issue == NonZero::new(27812) {
 if self.sess.opts.unstable_opts.force_unstable_if_unmarked {
 return EvalResult::Allow;
 }

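As a side note on the `Option<NonZero<u32>>` fields and the `issue == NonZero::new(27812)` comparison above: `NonZero::new` already returns an `Option`, so the two sides have the same type once the literal is inferred as `u32`. A tiny illustration (not compiler code):

```rust
use std::num::NonZero;

fn main() {
    // Mirrors the `issue: Option<NonZero<u32>>` field; None means "no tracking issue".
    let issue: Option<NonZero<u32>> = NonZero::new(27812);

    // `NonZero::new` yields Option<NonZero<u32>> here, so the comparison is
    // between two values of the same Option type.
    assert_eq!(issue, NonZero::new(27812));
    assert!(NonZero::<u32>::new(0).is_none());
}
```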

@@ -122,7 +122,7 @@ mod value;
 use std::fmt;
 use std::io;
 use std::io::{Read, Write};
-use std::num::{NonZeroU32, NonZeroU64};
+use std::num::NonZero;
 use std::sync::atomic::{AtomicU32, Ordering};
 use rustc_ast::LitKind;
@@ -206,7 +206,7 @@ pub enum LitToConstError {
 }
 #[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd)]
-pub struct AllocId(pub NonZeroU64);
+pub struct AllocId(pub NonZero<u64>);
 // We want the `Debug` output to be readable as it is used by `derive(Debug)` for
 // all the Miri types.
@@ -261,7 +261,7 @@ pub fn specialized_encode_alloc_id<'tcx, E: TyEncoder<I = TyCtxt<'tcx>>>(
 }
 // Used to avoid infinite recursion when decoding cyclic allocations.
-type DecodingSessionId = NonZeroU32;
+type DecodingSessionId = NonZero<u32>;
 #[derive(Clone)]
 enum State {
@@ -501,7 +501,7 @@ impl<'tcx> AllocMap<'tcx> {
 AllocMap {
 alloc_map: Default::default(),
 dedup: Default::default(),
-next_id: AllocId(NonZeroU64::new(1).unwrap()),
+next_id: AllocId(NonZero::new(1).unwrap()),
 }
 }
 fn reserve(&mut self) -> AllocId {

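The `AllocMap` hunk keeps the invariant that ids start at 1 and are never zero, which is what makes `NonZero::new(1).unwrap()` infallible and keeps `Option<AllocId>` the size of a `u64`. A simplified, hypothetical sketch of that reservation pattern (names and overflow handling are illustrative, not the compiler's actual code):

```rust
use std::num::NonZero;

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
struct AllocId(NonZero<u64>);

struct IdAllocator {
    next_id: AllocId,
}

impl IdAllocator {
    fn new() -> Self {
        // Zero is never handed out, so `NonZero::new(1)` cannot fail.
        IdAllocator { next_id: AllocId(NonZero::new(1).unwrap()) }
    }

    fn reserve(&mut self) -> AllocId {
        let id = self.next_id;
        // `checked_add` preserves the non-zero invariant and catches overflow.
        self.next_id = AllocId(id.0.checked_add(1).expect("ran out of ids"));
        id
    }
}

fn main() {
    let mut ids = IdAllocator::new();
    assert_eq!(ids.reserve().0.get(), 1);
    assert_eq!(ids.reserve().0.get(), 2);
    // The non-zero inner value gives Option<AllocId> a free niche.
    assert_eq!(std::mem::size_of::<Option<AllocId>>(), std::mem::size_of::<u64>());
}
```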

@@ -3,7 +3,7 @@ use super::{AllocId, InterpResult};
 use rustc_macros::HashStable;
 use rustc_target::abi::{HasDataLayout, Size};
-use std::{fmt, num::NonZeroU64};
+use std::{fmt, num::NonZero};
 ////////////////////////////////////////////////////////////////////////////////
 // Pointer arithmetic
@@ -129,7 +129,7 @@ pub trait Provenance: Copy + fmt::Debug + 'static {
 /// The type of provenance in the compile-time interpreter.
 /// This is a packed representation of an `AllocId` and an `immutable: bool`.
 #[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd)]
-pub struct CtfeProvenance(NonZeroU64);
+pub struct CtfeProvenance(NonZero<u64>);
 impl From<AllocId> for CtfeProvenance {
 fn from(value: AllocId) -> Self {
@@ -155,7 +155,7 @@ impl CtfeProvenance {
 /// Returns the `AllocId` of this provenance.
 #[inline(always)]
 pub fn alloc_id(self) -> AllocId {
-AllocId(NonZeroU64::new(self.0.get() & !IMMUTABLE_MASK).unwrap())
+AllocId(NonZero::new(self.0.get() & !IMMUTABLE_MASK).unwrap())
 }
 /// Returns whether this provenance is immutable.

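`CtfeProvenance` packs an `AllocId` together with an `immutable` flag into a single `NonZero<u64>`, and `alloc_id` can `unwrap` because clearing the flag bit never produces zero. A standalone sketch of that bit-packing trick; the mask value and names here are assumptions for illustration, not the compiler's actual constants:

```rust
use std::num::NonZero;

// Assumed for the sketch: the flag lives in the top bit, the id in the rest.
const FLAG_MASK: u64 = 1 << 63;

#[derive(Copy, Clone)]
struct Packed(NonZero<u64>);

impl Packed {
    fn new(id: NonZero<u64>, flag: bool) -> Self {
        debug_assert_eq!(id.get() & FLAG_MASK, 0);
        // Setting a bit cannot zero the value, so this `unwrap` never fires.
        Packed(NonZero::new(id.get() | if flag { FLAG_MASK } else { 0 }).unwrap())
    }

    fn id(self) -> NonZero<u64> {
        // The id occupies the low bits and is at least 1, so masking the
        // flag off still yields a non-zero value.
        NonZero::new(self.0.get() & !FLAG_MASK).unwrap()
    }

    fn flag(self) -> bool {
        self.0.get() & FLAG_MASK != 0
    }
}

fn main() {
    let p = Packed::new(NonZero::new(42).unwrap(), true);
    assert_eq!(p.id().get(), 42);
    assert!(p.flag());
}
```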

@@ -4,7 +4,7 @@ use rustc_errors::{DiagnosticArgValue, IntoDiagnosticArg};
 use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
 use rustc_target::abi::Size;
 use std::fmt;
-use std::num::NonZeroU8;
+use std::num::NonZero;
 use crate::ty::TyCtxt;
@@ -132,7 +132,7 @@ pub struct ScalarInt {
 /// The first `size` bytes of `data` are the value.
 /// Do not try to read less or more bytes than that. The remaining bytes must be 0.
 data: u128,
-size: NonZeroU8,
+size: NonZero<u8>,
 }
 // Cannot derive these, as the derives take references to the fields, and we
@@ -161,14 +161,14 @@ impl<D: Decoder> Decodable<D> for ScalarInt {
 let mut data = [0u8; 16];
 let size = d.read_u8();
 data[..size as usize].copy_from_slice(d.read_raw_bytes(size as usize));
-ScalarInt { data: u128::from_le_bytes(data), size: NonZeroU8::new(size).unwrap() }
+ScalarInt { data: u128::from_le_bytes(data), size: NonZero::new(size).unwrap() }
 }
 }
 impl ScalarInt {
-pub const TRUE: ScalarInt = ScalarInt { data: 1_u128, size: NonZeroU8::new(1).unwrap() };
+pub const TRUE: ScalarInt = ScalarInt { data: 1_u128, size: NonZero::new(1).unwrap() };
-pub const FALSE: ScalarInt = ScalarInt { data: 0_u128, size: NonZeroU8::new(1).unwrap() };
+pub const FALSE: ScalarInt = ScalarInt { data: 0_u128, size: NonZero::new(1).unwrap() };
 #[inline]
 pub fn size(self) -> Size {
@@ -196,7 +196,7 @@ impl ScalarInt {
 #[inline]
 pub fn null(size: Size) -> Self {
-Self { data: 0, size: NonZeroU8::new(size.bytes() as u8).unwrap() }
+Self { data: 0, size: NonZero::new(size.bytes() as u8).unwrap() }
 }
 #[inline]
@@ -208,7 +208,7 @@ impl ScalarInt {
 pub fn try_from_uint(i: impl Into<u128>, size: Size) -> Option<Self> {
 let data = i.into();
 if size.truncate(data) == data {
-Some(Self { data, size: NonZeroU8::new(size.bytes() as u8).unwrap() })
+Some(Self { data, size: NonZero::new(size.bytes() as u8).unwrap() })
 } else {
 None
 }
@@ -220,7 +220,7 @@ impl ScalarInt {
 // `into` performed sign extension, we have to truncate
 let truncated = size.truncate(i as u128);
 if size.sign_extend(truncated) as i128 == i {
-Some(Self { data: truncated, size: NonZeroU8::new(size.bytes() as u8).unwrap() })
+Some(Self { data: truncated, size: NonZero::new(size.bytes() as u8).unwrap() })
 } else {
 None
 }
@@ -388,7 +388,7 @@ macro_rules! from {
 fn from(u: $ty) -> Self {
 Self {
 data: u128::from(u),
-size: NonZeroU8::new(std::mem::size_of::<$ty>() as u8).unwrap(),
+size: NonZero::new(std::mem::size_of::<$ty>() as u8).unwrap(),
 }
 }
 }
@@ -427,7 +427,7 @@ impl TryFrom<ScalarInt> for bool {
 impl From<char> for ScalarInt {
 #[inline]
 fn from(c: char) -> Self {
-Self { data: c as u128, size: NonZeroU8::new(std::mem::size_of::<char>() as u8).unwrap() }
+Self { data: c as u128, size: NonZero::new(std::mem::size_of::<char>() as u8).unwrap() }
 }
 }
@@ -454,7 +454,7 @@ impl From<Single> for ScalarInt {
 #[inline]
 fn from(f: Single) -> Self {
 // We trust apfloat to give us properly truncated data.
-Self { data: f.to_bits(), size: NonZeroU8::new((Single::BITS / 8) as u8).unwrap() }
+Self { data: f.to_bits(), size: NonZero::new((Single::BITS / 8) as u8).unwrap() }
 }
 }
@@ -470,7 +470,7 @@ impl From<Double> for ScalarInt {
 #[inline]
 fn from(f: Double) -> Self {
 // We trust apfloat to give us properly truncated data.
-Self { data: f.to_bits(), size: NonZeroU8::new((Double::BITS / 8) as u8).unwrap() }
+Self { data: f.to_bits(), size: NonZero::new((Double::BITS / 8) as u8).unwrap() }
 }
 }

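For the `ScalarInt` hunks, here is a rough standalone equivalent of the `try_from_uint` check, with rustc's `Size` helper replaced by plain bit arithmetic (an assumption made for the sketch): a value is accepted only if it fits in the requested number of bytes, and the width is stored as a `NonZero<u8>`.

```rust
use std::num::NonZero;

/// Accept `value` only if it fits in `size_in_bytes` bytes (1..=16),
/// mirroring the `size.truncate(data) == data` test in the hunk above.
fn try_from_uint(value: u128, size_in_bytes: u8) -> Option<(u128, NonZero<u8>)> {
    let size = NonZero::new(size_in_bytes)?;
    let bits = u32::from(size.get()) * 8;
    let truncated = if bits >= 128 { value } else { value & ((1u128 << bits) - 1) };
    (truncated == value).then_some((value, size))
}

fn main() {
    assert!(try_from_uint(0xFFFF, 2).is_some());   // fits in two bytes
    assert!(try_from_uint(0x1_0000, 2).is_none()); // needs three bytes
}
```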

@@ -18,7 +18,7 @@ use core::intrinsics;
 use std::cmp::Ordering;
 use std::marker::PhantomData;
 use std::mem;
-use std::num::NonZeroUsize;
+use std::num::NonZero;
 use std::ops::{ControlFlow, Deref};
 use std::ptr::NonNull;
@@ -143,9 +143,8 @@ impl<'tcx> From<ty::Term<'tcx>> for GenericArg<'tcx> {
 impl<'tcx> GenericArg<'tcx> {
 #[inline]
 pub fn unpack(self) -> GenericArgKind<'tcx> {
-let ptr = unsafe {
-self.ptr.map_addr(|addr| NonZeroUsize::new_unchecked(addr.get() & !TAG_MASK))
-};
+let ptr =
+unsafe { self.ptr.map_addr(|addr| NonZero::new_unchecked(addr.get() & !TAG_MASK)) };
 // SAFETY: use of `Interned::new_unchecked` here is ok because these
 // pointers were originally created from `Interned` types in `pack()`,
 // and this is just going in the other direction.

@@ -20,7 +20,7 @@ use rustc_target::spec::{abi::Abi as SpecAbi, HasTargetSpec, PanicStrategy, Targ
 use std::cmp;
 use std::fmt;
-use std::num::NonZeroUsize;
+use std::num::NonZero;
 use std::ops::Bound;
 pub trait IntegerExt {
@@ -761,7 +761,7 @@ where
 };
 tcx.mk_layout(LayoutS {
 variants: Variants::Single { index: variant_index },
-fields: match NonZeroUsize::new(fields) {
+fields: match NonZero::new(fields) {
 Some(fields) => FieldsShape::Union(fields),
 None => FieldsShape::Arbitrary { offsets: IndexVec::new(), memory_index: IndexVec::new() },
 },

@@ -61,7 +61,7 @@ use std::fmt::Debug;
 use std::hash::{Hash, Hasher};
 use std::marker::PhantomData;
 use std::mem;
-use std::num::NonZeroUsize;
+use std::num::NonZero;
 use std::ops::ControlFlow;
 use std::ptr::NonNull;
 use std::{fmt, str};
@@ -617,9 +617,8 @@ impl<'tcx, D: TyDecoder<I = TyCtxt<'tcx>>> Decodable<D> for Term<'tcx> {
 impl<'tcx> Term<'tcx> {
 #[inline]
 pub fn unpack(self) -> TermKind<'tcx> {
-let ptr = unsafe {
-self.ptr.map_addr(|addr| NonZeroUsize::new_unchecked(addr.get() & !TAG_MASK))
-};
+let ptr =
+unsafe { self.ptr.map_addr(|addr| NonZero::new_unchecked(addr.get() & !TAG_MASK)) };
 // SAFETY: use of `Interned::new_unchecked` here is ok because these
 // pointers were originally created from `Interned` types in `pack()`,
 // and this is just going in the other direction.