Auto merge of #138267 - matthiaskrgr:rollup-vt76bhs, r=matthiaskrgr
Rollup of 12 pull requests

Successful merges:

 - #136127 (Allow `*const W<dyn A> -> *const dyn A` ptr cast)
 - #136968 (Turn order dependent trait objects future incompat warning into a hard error)
 - #137319 (Stabilize `const_vec_string_slice`)
 - #137885 (tidy: add triagebot checks)
 - #138040 (compiler: Use `size_of` from the prelude instead of imported)
 - #138084 (Use workspace lints for crates in `compiler/`)
 - #138158 (Move more layouting logic to `rustc_abi`)
 - #138160 (depend more on attr_data_structures and move find_attr! there)
 - #138192 (crashes: couple more tests)
 - #138216 (bootstrap: Fix stack printing when a step cycle is detected)
 - #138232 (Reduce verbosity of GCC build log)
 - #138242 (Revert "Don't test new error messages with the stage 0 compiler")

r? `@ghost`
`@rustbot` modify labels: rollup
commit ed897d5f85

273 changed files with 2403 additions and 1363 deletions
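Most of the hunks below apply the same mechanical change from #138040: since Rust 1.80 the free functions `size_of`, `size_of_val`, `align_of`, and `align_of_val` are re-exported from the standard prelude, so the `std::mem::` path prefix can be dropped. A minimal standalone sketch of the pattern:

```rust
fn main() {
    // Before: fully qualified path.
    let a = std::mem::size_of::<u64>();
    // After: `size_of` resolves through the prelude (Rust 1.80+).
    let b = size_of::<u64>();
    assert_eq!(a, b); // 8 on every platform
}
```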
```diff
@@ -63,7 +63,7 @@ rustc_index::newtype_index! {
     pub struct SerializedDepNodeIndex {}
 }

-const DEP_NODE_SIZE: usize = std::mem::size_of::<SerializedDepNodeIndex>();
+const DEP_NODE_SIZE: usize = size_of::<SerializedDepNodeIndex>();
 /// Amount of padding we need to add to the edge list data so that we can retrieve every
 /// SerializedDepNodeIndex with a fixed-size read then mask.
 const DEP_NODE_PAD: usize = DEP_NODE_SIZE - 1;
```
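The `DEP_NODE_PAD` comment above describes a fixed-size-read-then-mask trick: pad the serialized buffer so an unconditional `usize`-sized read never runs past the end, then mask off the bytes that don't belong to the index. A minimal sketch of the idea; the function name, buffer layout, and `width` parameter are illustrative, not rustc's actual decoding code:

```rust
// Illustrative only: read a `width`-byte little-endian index with one
// fixed-size `usize` read, relying on trailing padding to stay in bounds.
fn read_index(data: &[u8], offset: usize, width: usize) -> usize {
    let raw = usize::from_le_bytes(data[offset..offset + size_of::<usize>()].try_into().unwrap());
    // Keep only the low `width` bytes; the rest is padding or the next field.
    raw & (usize::MAX >> ((size_of::<usize>() - width) * 8))
}

fn main() {
    // A 4-byte index 0x01020304 stored little-endian, followed by
    // DEP_NODE_PAD-style padding so the fixed-size read stays in bounds.
    let data = [0x04, 0x03, 0x02, 0x01, 0, 0, 0, 0, 0, 0, 0];
    assert_eq!(read_index(&data, 0, 4), 0x01020304);
}
```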
```diff
@@ -175,7 +175,7 @@ impl EdgeHeader {

 #[inline]
 fn mask(bits: usize) -> usize {
-    usize::MAX >> ((std::mem::size_of::<usize>() * 8) - bits)
+    usize::MAX >> ((size_of::<usize>() * 8) - bits)
 }

 impl SerializedDepGraph {
```
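For reference, `mask(bits)` produces a value with the low `bits` bits set, which is what the fixed-size read is masked with. A standalone copy with a few spot checks (note `bits` must be in `1..=usize::BITS`, otherwise the shift amount is out of range):

```rust
#[inline]
fn mask(bits: usize) -> usize {
    usize::MAX >> ((size_of::<usize>() * 8) - bits)
}

fn main() {
    assert_eq!(mask(1), 0b1);
    assert_eq!(mask(8), 0xff);
    assert_eq!(mask(usize::BITS as usize), usize::MAX);
}
```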
```diff
@@ -208,9 +208,8 @@ impl SerializedDepGraph {
         // for a node with length 64, which means the spilled 1-byte leb128 length is 1 byte of at
         // least (34 byte header + 1 byte len + 64 bytes edge data), which is ~1%. A 2-byte leb128
         // length is about the same fractional overhead and it amortizes for yet greater lengths.
-        let mut edge_list_data = Vec::with_capacity(
-            graph_bytes - node_count * std::mem::size_of::<SerializedNodeHeader<D>>(),
-        );
+        let mut edge_list_data =
+            Vec::with_capacity(graph_bytes - node_count * size_of::<SerializedNodeHeader<D>>());

         for _index in 0..node_count {
             // Decode the header for this edge; the header packs together as many of the fixed-size
```
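The ~1% overhead claim in the comment above can be checked directly from its own numbers: 1 spilled length byte out of at least 34 + 1 + 64 = 99 bytes per node is about 1%. A quick recomputation using only the figures quoted in the comment:

```rust
fn main() {
    let header = 34.0;      // bytes of fixed-size node header (from the comment)
    let spilled_len = 1.0;  // the 1-byte leb128 length that spills out of the header
    let edge_data = 64.0;   // edge data for a node with length 64
    let overhead = spilled_len / (header + spilled_len + edge_data);
    println!("overhead ≈ {:.2}%", overhead * 100.0); // ≈ 1.01%
}
```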
```diff
@@ -300,7 +299,7 @@ struct Unpacked {
 // M..M+N bytes per index
 // M+N..16 kind
 impl<D: Deps> SerializedNodeHeader<D> {
-    const TOTAL_BITS: usize = std::mem::size_of::<DepKind>() * 8;
+    const TOTAL_BITS: usize = size_of::<DepKind>() * 8;
     const LEN_BITS: usize = Self::TOTAL_BITS - Self::KIND_BITS - Self::WIDTH_BITS;
     const WIDTH_BITS: usize = DEP_NODE_WIDTH_BITS;
     const KIND_BITS: usize = Self::TOTAL_BITS - D::DEP_KIND_MAX.leading_zeros() as usize;
```
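The constants above carve a node header into kind, width, and length bit fields: the kind takes as many bits as the largest `DepKind` value needs, and the length gets whatever is left. A worked example with hypothetical numbers; a `u16`-sized `DepKind`, `DEP_KIND_MAX = 300`, and 2 width bits are assumptions for illustration, not values taken from rustc:

```rust
fn main() {
    const TOTAL_BITS: u32 = u16::BITS; // assume DepKind is u16-sized: 16 bits
    const DEP_KIND_MAX: u16 = 300;     // hypothetical highest DepKind value
    const WIDTH_BITS: u32 = 2;         // stand-in for DEP_NODE_WIDTH_BITS

    // Smallest number of bits that can represent every kind up to DEP_KIND_MAX.
    let kind_bits = TOTAL_BITS - DEP_KIND_MAX.leading_zeros();
    // Whatever is left over encodes the (possibly spilled) edge-list length.
    let len_bits = TOTAL_BITS - kind_bits - WIDTH_BITS;

    assert_eq!(kind_bits, 9); // 300 needs 9 bits (2^9 = 512 > 300)
    assert_eq!(len_bits, 5);  // 16 - 9 - 2
}
```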
```diff
@@ -6,7 +6,6 @@
 #![feature(hash_raw_entry)]
 #![feature(let_chains)]
 #![feature(min_specialization)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end

 pub mod cache;
```
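The removed `#![warn(unreachable_pub)]` attribute matches #138084 from this rollup, which moves per-crate lint attributes into workspace-level Cargo lints. As a hedged sketch of that mechanism (the exact tables rustc uses may differ), the workspace root declares the lint once and each member crate opts in:

```toml
# Workspace root Cargo.toml: declare the lint once for all members.
[workspace.lints.rust]
unreachable_pub = "warn"

# Each member crate's Cargo.toml: inherit the workspace lint table.
[lints]
workspace = true
```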