Merge from rustc

commit d12c1f581f
304 changed files with 3973 additions and 2536 deletions
@@ -19,6 +19,14 @@
 # Note that this has no default value (x.py uses the defaults in `bootstrap.example.toml`).
 #profile = <none>
 
+# Inherits configuration values from different configuration files (a.k.a. config extensions).
+# Supports absolute paths, and uses the current directory (where the bootstrap was invoked)
+# as the base if the given path is not absolute.
+#
+# The overriding logic follows a right-to-left order. For example, in `include = ["a.toml", "b.toml"]`,
+# extension `b.toml` overrides `a.toml`. Also, parent extensions always overrides the inner ones.
+#include = []
+
 # Keeps track of major changes made to this configuration.
 #
 # This value also represents ID of the PR that caused major changes. Meaning,
@@ -191,7 +191,6 @@ pub enum AttributeKind {
     },
     MacroTransparency(Transparency),
     Repr(ThinVec<(ReprAttr, Span)>),
-    RustcMacroEdition2021,
     Stability {
         stability: Stability,
         /// Span of the `#[stable(...)]` or `#[unstable(...)]` attribute
@@ -28,7 +28,6 @@ pub(crate) mod cfg;
 pub(crate) mod confusables;
 pub(crate) mod deprecation;
 pub(crate) mod repr;
-pub(crate) mod rustc;
 pub(crate) mod stability;
 pub(crate) mod transparency;
 pub(crate) mod util;
@@ -1,19 +0,0 @@
-use rustc_attr_data_structures::AttributeKind;
-use rustc_span::sym;
-
-use super::{AcceptContext, SingleAttributeParser};
-use crate::parser::ArgParser;
-
-pub(crate) struct RustcMacroEdition2021Parser;
-
-// FIXME(jdonszelmann): make these proper diagnostics
-impl SingleAttributeParser for RustcMacroEdition2021Parser {
-    const PATH: &'static [rustc_span::Symbol] = &[sym::rustc_macro_edition_2021];
-
-    fn on_duplicate(_cx: &crate::context::AcceptContext<'_>, _first_span: rustc_span::Span) {}
-
-    fn convert(_cx: &AcceptContext<'_>, args: &ArgParser<'_>) -> Option<AttributeKind> {
-        assert!(args.no_args());
-        Some(AttributeKind::RustcMacroEdition2021)
-    }
-}
@@ -15,7 +15,6 @@ use crate::attributes::allow_unstable::{AllowConstFnUnstableParser, AllowInterna
 use crate::attributes::confusables::ConfusablesParser;
 use crate::attributes::deprecation::DeprecationParser;
 use crate::attributes::repr::ReprParser;
-use crate::attributes::rustc::RustcMacroEdition2021Parser;
 use crate::attributes::stability::{
     BodyStabilityParser, ConstStabilityIndirectParser, ConstStabilityParser, StabilityParser,
 };
@@ -77,7 +76,6 @@ attribute_groups!(
     // tidy-alphabetical-start
     Single<ConstStabilityIndirectParser>,
     Single<DeprecationParser>,
-    Single<RustcMacroEdition2021Parser>,
     Single<TransparencyParser>,
     // tidy-alphabetical-end
 ];
@@ -247,9 +247,9 @@ builtin_macros_multiple_defaults = multiple declared defaults
     .suggestion = make `{$ident}` default
 
 builtin_macros_naked_functions_testing_attribute =
-    cannot use `#[naked]` with testing attributes
+    cannot use `#[unsafe(naked)]` with testing attributes
     .label = function marked with testing attribute here
-    .naked_attribute = `#[naked]` is incompatible with testing attributes
+    .naked_attribute = `#[unsafe(naked)]` is incompatible with testing attributes
 
 builtin_macros_no_default_variant = `#[derive(Default)]` on enum with no `#[default]`
     .label = this enum needs a unit variant marked with `#[default]`
@@ -387,11 +387,9 @@ global_asm! {
 }
 
 #[cfg(all(not(jit), target_arch = "x86_64"))]
-#[naked]
+#[unsafe(naked)]
 extern "C" fn naked_test() {
-    unsafe {
-        naked_asm!("ret");
-    }
+    naked_asm!("ret")
 }
 
 #[repr(C)]
@@ -447,9 +447,14 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
         m_len == v_len,
         InvalidMonomorphization::MismatchedLengths { span, name, m_len, v_len }
     );
+    // TODO: also support unsigned integers.
    match *m_elem_ty.kind() {
        ty::Int(_) => {}
-        _ => return_error!(InvalidMonomorphization::MaskType { span, name, ty: m_elem_ty }),
+        _ => return_error!(InvalidMonomorphization::MaskWrongElementType {
+            span,
+            name,
+            ty: m_elem_ty
+        }),
    }
    return Ok(bx.vector_select(args[0].immediate(), args[1].immediate(), args[2].immediate()));
 }
@@ -991,19 +996,15 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
         assert_eq!(pointer_count - 1, ptr_count(element_ty0));
         assert_eq!(underlying_ty, non_ptr(element_ty0));
 
-        // The element type of the third argument must be a signed integer type of any width:
+        // The element type of the third argument must be an integer type of any width:
+        // TODO: also support unsigned integers.
         let (_, element_ty2) = arg_tys[2].simd_size_and_type(bx.tcx());
         match *element_ty2.kind() {
             ty::Int(_) => (),
             _ => {
                 require!(
                     false,
-                    InvalidMonomorphization::ThirdArgElementType {
-                        span,
-                        name,
-                        expected_element: element_ty2,
-                        third_arg: arg_tys[2]
-                    }
+                    InvalidMonomorphization::MaskWrongElementType { span, name, ty: element_ty2 }
                 );
             }
         }
@@ -1109,17 +1110,13 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
         assert_eq!(underlying_ty, non_ptr(element_ty0));
 
         // The element type of the third argument must be a signed integer type of any width:
+        // TODO: also support unsigned integers.
         match *element_ty2.kind() {
             ty::Int(_) => (),
             _ => {
                 require!(
                     false,
-                    InvalidMonomorphization::ThirdArgElementType {
-                        span,
-                        name,
-                        expected_element: element_ty2,
-                        third_arg: arg_tys[2]
-                    }
+                    InvalidMonomorphization::MaskWrongElementType { span, name, ty: element_ty2 }
                 );
             }
         }
@@ -1184,18 +1184,6 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
         }};
     }
 
-    /// Returns the bitwidth of the `$ty` argument if it is an `Int` type.
-    macro_rules! require_int_ty {
-        ($ty: expr, $diag: expr) => {
-            match $ty {
-                ty::Int(i) => i.bit_width().unwrap_or_else(|| bx.data_layout().pointer_size.bits()),
-                _ => {
-                    return_error!($diag);
-                }
-            }
-        };
-    }
-
     /// Returns the bitwidth of the `$ty` argument if it is an `Int` or `Uint` type.
     macro_rules! require_int_or_uint_ty {
         ($ty: expr, $diag: expr) => {
@@ -1485,9 +1473,9 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
             m_len == v_len,
             InvalidMonomorphization::MismatchedLengths { span, name, m_len, v_len }
         );
-        let in_elem_bitwidth = require_int_ty!(
+        let in_elem_bitwidth = require_int_or_uint_ty!(
             m_elem_ty.kind(),
-            InvalidMonomorphization::MaskType { span, name, ty: m_elem_ty }
+            InvalidMonomorphization::MaskWrongElementType { span, name, ty: m_elem_ty }
         );
         let m_i1s = vector_mask_to_bitmask(bx, args[0].immediate(), in_elem_bitwidth, m_len);
         return Ok(bx.select(m_i1s, args[1].immediate(), args[2].immediate()));
@@ -1508,7 +1496,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
         // Integer vector <i{in_bitwidth} x in_len>:
         let in_elem_bitwidth = require_int_or_uint_ty!(
             in_elem.kind(),
-            InvalidMonomorphization::VectorArgument { span, name, in_ty, in_elem }
+            InvalidMonomorphization::MaskWrongElementType { span, name, ty: in_elem }
         );
 
         let i1xn = vector_mask_to_bitmask(bx, args[0].immediate(), in_elem_bitwidth, in_len);
@@ -1732,14 +1720,9 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
             }
         );
 
-        let mask_elem_bitwidth = require_int_ty!(
+        let mask_elem_bitwidth = require_int_or_uint_ty!(
             element_ty2.kind(),
-            InvalidMonomorphization::ThirdArgElementType {
-                span,
-                name,
-                expected_element: element_ty2,
-                third_arg: arg_tys[2]
-            }
+            InvalidMonomorphization::MaskWrongElementType { span, name, ty: element_ty2 }
         );
 
         // Alignment of T, must be a constant integer value:
@@ -1834,14 +1817,9 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
             }
         );
 
-        let m_elem_bitwidth = require_int_ty!(
+        let m_elem_bitwidth = require_int_or_uint_ty!(
             mask_elem.kind(),
-            InvalidMonomorphization::ThirdArgElementType {
-                span,
-                name,
-                expected_element: values_elem,
-                third_arg: mask_ty,
-            }
+            InvalidMonomorphization::MaskWrongElementType { span, name, ty: mask_elem }
        );
 
        let mask = vector_mask_to_bitmask(bx, args[0].immediate(), m_elem_bitwidth, mask_len);
@@ -1924,14 +1902,9 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
             }
         );
 
-        let m_elem_bitwidth = require_int_ty!(
+        let m_elem_bitwidth = require_int_or_uint_ty!(
             mask_elem.kind(),
-            InvalidMonomorphization::ThirdArgElementType {
-                span,
-                name,
-                expected_element: values_elem,
-                third_arg: mask_ty,
-            }
+            InvalidMonomorphization::MaskWrongElementType { span, name, ty: mask_elem }
         );
 
         let mask = vector_mask_to_bitmask(bx, args[0].immediate(), m_elem_bitwidth, mask_len);
@@ -2019,15 +1992,10 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
             }
         );
 
-        // The element type of the third argument must be a signed integer type of any width:
-        let mask_elem_bitwidth = require_int_ty!(
+        // The element type of the third argument must be an integer type of any width:
+        let mask_elem_bitwidth = require_int_or_uint_ty!(
             element_ty2.kind(),
-            InvalidMonomorphization::ThirdArgElementType {
-                span,
-                name,
-                expected_element: element_ty2,
-                third_arg: arg_tys[2]
-            }
+            InvalidMonomorphization::MaskWrongElementType { span, name, ty: element_ty2 }
         );
 
         // Alignment of T, must be a constant integer value:
@@ -125,8 +125,7 @@ codegen_ssa_invalid_monomorphization_inserted_type = invalid monomorphization of
 
 codegen_ssa_invalid_monomorphization_invalid_bitmask = invalid monomorphization of `{$name}` intrinsic: invalid bitmask `{$mask_ty}`, expected `u{$expected_int_bits}` or `[u8; {$expected_bytes}]`
 
-codegen_ssa_invalid_monomorphization_mask_type = invalid monomorphization of `{$name}` intrinsic: found mask element type is `{$ty}`, expected a signed integer type
-    .note = the mask may be widened, which only has the correct behavior for signed integers
+codegen_ssa_invalid_monomorphization_mask_wrong_element_type = invalid monomorphization of `{$name}` intrinsic: expected mask element type to be an integer, found `{$ty}`
 
 codegen_ssa_invalid_monomorphization_mismatched_lengths = invalid monomorphization of `{$name}` intrinsic: mismatched lengths: mask length `{$m_len}` != other vector length `{$v_len}`
 
@@ -158,8 +157,6 @@ codegen_ssa_invalid_monomorphization_simd_shuffle = invalid monomorphization of
 
 codegen_ssa_invalid_monomorphization_simd_third = invalid monomorphization of `{$name}` intrinsic: expected SIMD third type, found non-SIMD `{$ty}`
 
-codegen_ssa_invalid_monomorphization_third_arg_element_type = invalid monomorphization of `{$name}` intrinsic: expected element type `{$expected_element}` of third argument `{$third_arg}` to be a signed integer type
-
 codegen_ssa_invalid_monomorphization_third_argument_length = invalid monomorphization of `{$name}` intrinsic: expected third argument with length {$in_len} (same as input type `{$in_ty}`), found `{$arg_ty}` with length {$out_len}
 
 codegen_ssa_invalid_monomorphization_unrecognized_intrinsic = invalid monomorphization of `{$name}` intrinsic: unrecognized intrinsic `{$name}`
@@ -172,8 +169,6 @@ codegen_ssa_invalid_monomorphization_unsupported_symbol = invalid monomorphizati
 
 codegen_ssa_invalid_monomorphization_unsupported_symbol_of_size = invalid monomorphization of `{$name}` intrinsic: unsupported {$symbol} from `{$in_ty}` with element `{$in_elem}` of size `{$size}` to `{$ret_ty}`
 
-codegen_ssa_invalid_monomorphization_vector_argument = invalid monomorphization of `{$name}` intrinsic: vector argument `{$in_ty}`'s element type `{$in_elem}`, expected integer element type
-
 codegen_ssa_invalid_no_sanitize = invalid argument for `no_sanitize`
     .note = expected one of: `address`, `cfi`, `hwaddress`, `kcfi`, `memory`, `memtag`, `shadow-call-stack`, or `thread`
 
@@ -1037,24 +1037,14 @@ pub enum InvalidMonomorphization<'tcx> {
         v_len: u64,
     },
 
-    #[diag(codegen_ssa_invalid_monomorphization_mask_type, code = E0511)]
-    #[note]
-    MaskType {
+    #[diag(codegen_ssa_invalid_monomorphization_mask_wrong_element_type, code = E0511)]
+    MaskWrongElementType {
         #[primary_span]
         span: Span,
         name: Symbol,
         ty: Ty<'tcx>,
     },
 
-    #[diag(codegen_ssa_invalid_monomorphization_vector_argument, code = E0511)]
-    VectorArgument {
-        #[primary_span]
-        span: Span,
-        name: Symbol,
-        in_ty: Ty<'tcx>,
-        in_elem: Ty<'tcx>,
-    },
-
     #[diag(codegen_ssa_invalid_monomorphization_cannot_return, code = E0511)]
     CannotReturn {
         #[primary_span]
@@ -1077,15 +1067,6 @@ pub enum InvalidMonomorphization<'tcx> {
         mutability: ExpectedPointerMutability,
     },
 
-    #[diag(codegen_ssa_invalid_monomorphization_third_arg_element_type, code = E0511)]
-    ThirdArgElementType {
-        #[primary_span]
-        span: Span,
-        name: Symbol,
-        expected_element: Ty<'tcx>,
-        third_arg: Ty<'tcx>,
-    },
-
     #[diag(codegen_ssa_invalid_monomorphization_unsupported_symbol_of_size, code = E0511)]
     UnsupportedSymbolOfSize {
         #[primary_span]
@@ -11,7 +11,7 @@ Erroneous code example:
 
 ```compile_fail,E0736
 #[inline]
-#[naked]
+#[unsafe(naked)]
 fn foo() {}
 ```
 
@@ -5,7 +5,7 @@ Erroneous code example:
 ```compile_fail,E0787
 #![feature(naked_functions)]
 
-#[naked]
+#[unsafe(naked)]
 pub extern "C" fn f() -> u32 {
     42
 }
@@ -517,7 +517,7 @@ pub static BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
 
     // Linking:
     gated!(
-        naked, Normal, template!(Word), WarnFollowing, EncodeCrossCrate::No,
+        unsafe naked, Normal, template!(Word), WarnFollowing, EncodeCrossCrate::No,
         naked_functions, experimental!(naked)
     ),
 
@@ -676,14 +676,6 @@ pub static BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
         "`rustc_never_type_options` is used to experiment with never type fallback and work on \
         never type stabilization, and will never be stable"
     ),
-    rustc_attr!(
-        rustc_macro_edition_2021,
-        Normal,
-        template!(Word),
-        ErrorFollowing,
-        EncodeCrossCrate::No,
-        "makes spans in this macro edition 2021"
-    ),
 
     // ==========================================================================
     // Internal attributes: Runtime related:
@@ -818,8 +818,8 @@ fn test_unstable_options_tracking_hash() {
     tracked!(min_function_alignment, Some(Align::EIGHT));
     tracked!(mir_emit_retag, true);
     tracked!(mir_enable_passes, vec![("DestProp".to_string(), false)]);
-    tracked!(mir_keep_place_mention, true);
     tracked!(mir_opt_level, Some(4));
+    tracked!(mir_preserve_ub, true);
     tracked!(move_size_limit, Some(4096));
     tracked!(mutable_noalias, false);
     tracked!(next_solver, NextSolverConfig { coherence: true, globally: true });
@@ -427,12 +427,21 @@ impl<'a> CrateLocator<'a> {
 
                 let (rlibs, rmetas, dylibs) =
                     candidates.entry(hash.to_string()).or_default();
-                let path =
-                    try_canonicalize(&spf.path).unwrap_or_else(|_| spf.path.to_path_buf());
+                {
+                    // As a perforamnce optimisation we canonicalize the path and skip
+                    // ones we've already seeen. This allows us to ignore crates
+                    // we know are exactual equal to ones we've already found.
+                    // Going to the same crate through different symlinks does not change the result.
+                    let path = try_canonicalize(&spf.path)
+                        .unwrap_or_else(|_| spf.path.to_path_buf());
                 if seen_paths.contains(&path) {
                     continue;
                 };
-                seen_paths.insert(path.clone());
+                    seen_paths.insert(path);
+                }
+                // Use the original path (potentially with unresolved symlinks),
+                // filesystem code should not care, but this is nicer for diagnostics.
+                let path = spf.path.to_path_buf();
                 match kind {
                     CrateFlavor::Rlib => rlibs.insert(path, search_path.kind),
                     CrateFlavor::Rmeta => rmetas.insert(path, search_path.kind),
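Editor's note on the hunk above: canonicalization is used only to skip crate candidates already reached through a different symlink, while the original, user-visible path is what gets stored for diagnostics. A standalone sketch of that pattern (a hypothetical helper, not the compiler's code):

```rust
use std::collections::HashSet;
use std::fs;
use std::path::PathBuf;

/// Drop paths that resolve to the same file, keeping the original spelling of the first hit.
fn dedup_by_canonical_path(paths: &[PathBuf]) -> Vec<PathBuf> {
    let mut seen: HashSet<PathBuf> = HashSet::new();
    let mut unique = Vec::new();
    for p in paths {
        // Fall back to the raw path if canonicalization fails (e.g. the file vanished).
        let canon = fs::canonicalize(p).unwrap_or_else(|_| p.clone());
        if seen.insert(canon) {
            // Keep the possibly-symlinked original path; it reads better in diagnostics.
            unique.push(p.clone());
        }
    }
    unique
}
```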
@@ -55,8 +55,6 @@ bitflags::bitflags! {
         const IS_UNSAFE_CELL = 1 << 9;
         /// Indicates whether the type is `UnsafePinned`.
         const IS_UNSAFE_PINNED = 1 << 10;
-        /// Indicates whether the type is anonymous.
-        const IS_ANONYMOUS = 1 << 11;
     }
 }
 rustc_data_structures::external_bitflags_debug! { AdtFlags }
@@ -564,13 +564,17 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> {
                 }
             }
             ExprKind::InlineAsm(box InlineAsmExpr {
-                asm_macro: AsmMacro::Asm | AsmMacro::NakedAsm,
+                asm_macro: asm_macro @ (AsmMacro::Asm | AsmMacro::NakedAsm),
                 ref operands,
                 template: _,
                 options: _,
                 line_spans: _,
             }) => {
+                // The `naked` attribute and the `naked_asm!` block form one atomic unit of
+                // unsafety, and `naked_asm!` does not itself need to be wrapped in an unsafe block.
+                if let AsmMacro::Asm = asm_macro {
                 self.requires_unsafe(expr.span, UseOfInlineAssembly);
+                }
 
                 // For inline asm, do not use `walk_expr`, since we want to handle the label block
                 // specially.
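Editor's note: the hunk above treats the `naked` attribute and the `naked_asm!` body as one unit of unsafety, so only plain `asm!` still forces an `unsafe {}` block. A minimal sketch of what this means for users, assuming a nightly toolchain from around this merge (where `naked_functions` was still feature-gated) and an x86_64 target:

```rust
#![feature(naked_functions)]
use std::arch::naked_asm;

// With `#[unsafe(naked)]`, the unsafety is acknowledged on the attribute itself,
// so the `naked_asm!` body needs no extra `unsafe {}` wrapper.
#[unsafe(naked)]
extern "C" fn return_42() -> u32 {
    naked_asm!("mov eax, 42", "ret")
}
```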
@@ -223,7 +223,7 @@ impl<'tcx> crate::MirPass<'tcx> for EarlyOtherwiseBranch {
         // Since this optimization adds new basic blocks and invalidates others,
         // clean up the cfg to make it nicer for other passes
         if should_cleanup {
-            simplify_cfg(body);
+            simplify_cfg(tcx, body);
         }
     }
 
@@ -63,7 +63,7 @@ impl<'tcx> crate::MirPass<'tcx> for Inline {
         let _guard = span.enter();
         if inline::<NormalInliner<'tcx>>(tcx, body) {
             debug!("running simplify cfg on {:?}", body.source);
-            simplify_cfg(body);
+            simplify_cfg(tcx, body);
             deref_finder(tcx, body);
         }
     }
@@ -99,7 +99,7 @@ impl<'tcx> crate::MirPass<'tcx> for ForceInline {
         let _guard = span.enter();
         if inline::<ForceInliner<'tcx>>(tcx, body) {
             debug!("running simplify cfg on {:?}", body.source);
-            simplify_cfg(body);
+            simplify_cfg(tcx, body);
             deref_finder(tcx, body);
         }
     }
@@ -90,11 +90,7 @@ impl<'tcx> crate::MirPass<'tcx> for JumpThreading {
         };
 
         for bb in body.basic_blocks.indices() {
-            let old_len = finder.opportunities.len();
-            // If we have any const-eval errors discard any opportunities found
-            if finder.start_from_switch(bb).is_none() {
-                finder.opportunities.truncate(old_len);
-            }
+            finder.start_from_switch(bb);
         }
 
         let opportunities = finder.opportunities;
@@ -201,28 +197,26 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> {
 
     /// Recursion entry point to find threading opportunities.
     #[instrument(level = "trace", skip(self))]
-    fn start_from_switch(&mut self, bb: BasicBlock) -> Option<()> {
+    fn start_from_switch(&mut self, bb: BasicBlock) {
         let bbdata = &self.body[bb];
         if bbdata.is_cleanup || self.loop_headers.contains(bb) {
-            return Some(());
+            return;
         }
-        let Some((discr, targets)) = bbdata.terminator().kind.as_switch() else { return Some(()) };
-        let Some(discr) = discr.place() else { return Some(()) };
+        let Some((discr, targets)) = bbdata.terminator().kind.as_switch() else { return };
+        let Some(discr) = discr.place() else { return };
         debug!(?discr, ?bb);
 
         let discr_ty = discr.ty(self.body, self.tcx).ty;
-        let Ok(discr_layout) = self.ecx.layout_of(discr_ty) else {
-            return Some(());
-        };
+        let Ok(discr_layout) = self.ecx.layout_of(discr_ty) else { return };
 
-        let Some(discr) = self.map.find(discr.as_ref()) else { return Some(()) };
+        let Some(discr) = self.map.find(discr.as_ref()) else { return };
         debug!(?discr);
 
         let cost = CostChecker::new(self.tcx, self.typing_env, None, self.body);
         let mut state = State::new_reachable();
 
         let conds = if let Some((value, then, else_)) = targets.as_static_if() {
-            let value = ScalarInt::try_from_uint(value, discr_layout.size)?;
+            let Some(value) = ScalarInt::try_from_uint(value, discr_layout.size) else { return };
             self.arena.alloc_from_iter([
                 Condition { value, polarity: Polarity::Eq, target: then },
                 Condition { value, polarity: Polarity::Ne, target: else_ },
@@ -248,10 +242,10 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> {
         mut state: State<ConditionSet<'a>>,
         mut cost: CostChecker<'_, 'tcx>,
         depth: usize,
-    ) -> Option<()> {
+    ) {
         // Do not thread through loop headers.
         if self.loop_headers.contains(bb) {
-            return Some(());
+            return;
         }
 
         debug!(cost = ?cost.cost());
@@ -259,16 +253,16 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> {
             self.body.basic_blocks[bb].statements.iter().enumerate().rev()
         {
             if self.is_empty(&state) {
-                return Some(());
+                return;
             }
 
             cost.visit_statement(stmt, Location { block: bb, statement_index });
             if cost.cost() > MAX_COST {
-                return Some(());
+                return;
             }
 
             // Attempt to turn the `current_condition` on `lhs` into a condition on another place.
-            self.process_statement(bb, stmt, &mut state)?;
+            self.process_statement(bb, stmt, &mut state);
 
             // When a statement mutates a place, assignments to that place that happen
             // above the mutation cannot fulfill a condition.
@@ -280,7 +274,7 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> {
         }
 
         if self.is_empty(&state) || depth >= MAX_BACKTRACK {
-            return Some(());
+            return;
         }
 
         let last_non_rec = self.opportunities.len();
@@ -293,9 +287,9 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> {
             match term.kind {
                 TerminatorKind::SwitchInt { ref discr, ref targets } => {
                     self.process_switch_int(discr, targets, bb, &mut state);
-                    self.find_opportunity(pred, state, cost, depth + 1)?;
+                    self.find_opportunity(pred, state, cost, depth + 1);
                 }
-                _ => self.recurse_through_terminator(pred, || state, &cost, depth)?,
+                _ => self.recurse_through_terminator(pred, || state, &cost, depth),
             }
         } else if let &[ref predecessors @ .., last_pred] = &predecessors[..] {
             for &pred in predecessors {
@@ -320,13 +314,12 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> {
             let first = &mut new_tos[0];
             *first = ThreadingOpportunity { chain: vec![bb], target: first.target };
             self.opportunities.truncate(last_non_rec + 1);
-            return Some(());
+            return;
         }
 
         for op in self.opportunities[last_non_rec..].iter_mut() {
             op.chain.push(bb);
         }
-        Some(())
     }
 
     /// Extract the mutated place from a statement.
@@ -440,23 +433,23 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> {
         lhs: PlaceIndex,
         rhs: &Operand<'tcx>,
         state: &mut State<ConditionSet<'a>>,
-    ) -> Option<()> {
+    ) {
         match rhs {
             // If we expect `lhs ?= A`, we have an opportunity if we assume `constant == A`.
             Operand::Constant(constant) => {
-                let constant = self
-                    .ecx
-                    .eval_mir_constant(&constant.const_, constant.span, None)
-                    .discard_err()?;
+                let Some(constant) =
+                    self.ecx.eval_mir_constant(&constant.const_, constant.span, None).discard_err()
+                else {
+                    return;
+                };
                 self.process_constant(bb, lhs, constant, state);
             }
             // Transfer the conditions on the copied rhs.
             Operand::Move(rhs) | Operand::Copy(rhs) => {
-                let Some(rhs) = self.map.find(rhs.as_ref()) else { return Some(()) };
+                let Some(rhs) = self.map.find(rhs.as_ref()) else { return };
                 state.insert_place_idx(rhs, lhs, &self.map);
             }
         }
-        Some(())
     }
 
     #[instrument(level = "trace", skip(self))]
@@ -466,18 +459,14 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> {
         lhs_place: &Place<'tcx>,
         rhs: &Rvalue<'tcx>,
         state: &mut State<ConditionSet<'a>>,
-    ) -> Option<()> {
-        let Some(lhs) = self.map.find(lhs_place.as_ref()) else {
-            return Some(());
-        };
+    ) {
+        let Some(lhs) = self.map.find(lhs_place.as_ref()) else { return };
         match rhs {
-            Rvalue::Use(operand) => self.process_operand(bb, lhs, operand, state)?,
+            Rvalue::Use(operand) => self.process_operand(bb, lhs, operand, state),
             // Transfer the conditions on the copy rhs.
-            Rvalue::CopyForDeref(rhs) => {
-                self.process_operand(bb, lhs, &Operand::Copy(*rhs), state)?
-            }
+            Rvalue::CopyForDeref(rhs) => self.process_operand(bb, lhs, &Operand::Copy(*rhs), state),
             Rvalue::Discriminant(rhs) => {
-                let Some(rhs) = self.map.find_discr(rhs.as_ref()) else { return Some(()) };
+                let Some(rhs) = self.map.find_discr(rhs.as_ref()) else { return };
                 state.insert_place_idx(rhs, lhs, &self.map);
             }
             // If we expect `lhs ?= A`, we have an opportunity if we assume `constant == A`.
@@ -485,7 +474,7 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> {
                 let agg_ty = lhs_place.ty(self.body, self.tcx).ty;
                 let lhs = match kind {
                     // Do not support unions.
-                    AggregateKind::Adt(.., Some(_)) => return Some(()),
+                    AggregateKind::Adt(.., Some(_)) => return,
                     AggregateKind::Adt(_, variant_index, ..) if agg_ty.is_enum() => {
                         if let Some(discr_target) = self.map.apply(lhs, TrackElem::Discriminant)
                             && let Some(discr_value) = self
@@ -498,23 +487,23 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> {
                         if let Some(idx) = self.map.apply(lhs, TrackElem::Variant(*variant_index)) {
                             idx
                         } else {
-                            return Some(());
+                            return;
                         }
                     }
                     _ => lhs,
                 };
                 for (field_index, operand) in operands.iter_enumerated() {
                     if let Some(field) = self.map.apply(lhs, TrackElem::Field(field_index)) {
-                        self.process_operand(bb, field, operand, state)?;
+                        self.process_operand(bb, field, operand, state);
                     }
                 }
             }
             // Transfer the conditions on the copy rhs, after inverting the value of the condition.
             Rvalue::UnaryOp(UnOp::Not, Operand::Move(place) | Operand::Copy(place)) => {
                 let layout = self.ecx.layout_of(place.ty(self.body, self.tcx).ty).unwrap();
-                let Some(conditions) = state.try_get_idx(lhs, &self.map) else { return Some(()) };
-                let Some(place) = self.map.find(place.as_ref()) else { return Some(()) };
-                let conds = conditions.map(self.arena, |mut cond| {
+                let Some(conditions) = state.try_get_idx(lhs, &self.map) else { return };
+                let Some(place) = self.map.find(place.as_ref()) else { return };
+                let Some(conds) = conditions.map(self.arena, |mut cond| {
                     cond.value = self
                         .ecx
                         .unary_op(UnOp::Not, &ImmTy::from_scalar_int(cond.value, layout))
@@ -522,7 +511,9 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> {
                         .to_scalar_int()
                         .discard_err()?;
                     Some(cond)
-                })?;
+                }) else {
+                    return;
+                };
                 state.insert_value_idx(place, conds, &self.map);
             }
             // We expect `lhs ?= A`. We found `lhs = Eq(rhs, B)`.
@@ -532,34 +523,38 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> {
                 box (Operand::Move(place) | Operand::Copy(place), Operand::Constant(value))
                 | box (Operand::Constant(value), Operand::Move(place) | Operand::Copy(place)),
             ) => {
-                let Some(conditions) = state.try_get_idx(lhs, &self.map) else { return Some(()) };
-                let Some(place) = self.map.find(place.as_ref()) else { return Some(()) };
+                let Some(conditions) = state.try_get_idx(lhs, &self.map) else { return };
+                let Some(place) = self.map.find(place.as_ref()) else { return };
                 let equals = match op {
                     BinOp::Eq => ScalarInt::TRUE,
                     BinOp::Ne => ScalarInt::FALSE,
-                    _ => return Some(()),
+                    _ => return,
                 };
                 if value.const_.ty().is_floating_point() {
                     // Floating point equality does not follow bit-patterns.
                     // -0.0 and NaN both have special rules for equality,
                     // and therefore we cannot use integer comparisons for them.
                     // Avoid handling them, though this could be extended in the future.
-                    return Some(());
+                    return;
                 }
-                let value = value.const_.try_eval_scalar_int(self.tcx, self.typing_env)?;
-                let conds = conditions.map(self.arena, |c| {
+                let Some(value) = value.const_.try_eval_scalar_int(self.tcx, self.typing_env)
+                else {
+                    return;
+                };
+                let Some(conds) = conditions.map(self.arena, |c| {
                     Some(Condition {
                         value,
                         polarity: if c.matches(equals) { Polarity::Eq } else { Polarity::Ne },
                         ..c
                     })
-                })?;
+                }) else {
+                    return;
+                };
                 state.insert_value_idx(place, conds, &self.map);
             }
 
             _ => {}
         }
-        Some(())
     }
 
     #[instrument(level = "trace", skip(self))]
@@ -568,7 +563,7 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> {
         bb: BasicBlock,
         stmt: &Statement<'tcx>,
         state: &mut State<ConditionSet<'a>>,
-    ) -> Option<()> {
+    ) {
         let register_opportunity = |c: Condition| {
             debug!(?bb, ?c.target, "register");
             self.opportunities.push(ThreadingOpportunity { chain: vec![bb], target: c.target })
@@ -581,32 +576,30 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> {
             // If we expect `discriminant(place) ?= A`,
             // we have an opportunity if `variant_index ?= A`.
             StatementKind::SetDiscriminant { box place, variant_index } => {
-                let Some(discr_target) = self.map.find_discr(place.as_ref()) else {
-                    return Some(());
-                };
+                let Some(discr_target) = self.map.find_discr(place.as_ref()) else { return };
                 let enum_ty = place.ty(self.body, self.tcx).ty;
                 // `SetDiscriminant` guarantees that the discriminant is now `variant_index`.
                 // Even if the discriminant write does nothing due to niches, it is UB to set the
                 // discriminant when the data does not encode the desired discriminant.
-                let discr =
-                    self.ecx.discriminant_for_variant(enum_ty, *variant_index).discard_err()?;
-                self.process_immediate(bb, discr_target, discr, state);
+                let Some(discr) =
+                    self.ecx.discriminant_for_variant(enum_ty, *variant_index).discard_err()
+                else {
+                    return;
+                };
+                self.process_immediate(bb, discr_target, discr, state)
             }
             // If we expect `lhs ?= true`, we have an opportunity if we assume `lhs == true`.
             StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume(
                 Operand::Copy(place) | Operand::Move(place),
             )) => {
-                let Some(conditions) = state.try_get(place.as_ref(), &self.map) else {
-                    return Some(());
-                };
-                conditions.iter_matches(ScalarInt::TRUE).for_each(register_opportunity);
+                let Some(conditions) = state.try_get(place.as_ref(), &self.map) else { return };
+                conditions.iter_matches(ScalarInt::TRUE).for_each(register_opportunity)
             }
             StatementKind::Assign(box (lhs_place, rhs)) => {
-                self.process_assign(bb, lhs_place, rhs, state)?;
+                self.process_assign(bb, lhs_place, rhs, state)
             }
             _ => {}
         }
-        Some(())
     }
 
     #[instrument(level = "trace", skip(self, state, cost))]
@@ -617,7 +610,7 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> {
         state: impl FnOnce() -> State<ConditionSet<'a>>,
         cost: &CostChecker<'_, 'tcx>,
         depth: usize,
-    ) -> Option<()> {
+    ) {
         let term = self.body.basic_blocks[bb].terminator();
         let place_to_flood = match term.kind {
             // We come from a target, so those are not possible.
@@ -632,9 +625,9 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> {
             | TerminatorKind::FalseUnwind { .. }
             | TerminatorKind::Yield { .. } => bug!("{term:?} invalid"),
             // Cannot reason about inline asm.
-            TerminatorKind::InlineAsm { .. } => return Some(()),
+            TerminatorKind::InlineAsm { .. } => return,
             // `SwitchInt` is handled specially.
-            TerminatorKind::SwitchInt { .. } => return Some(()),
+            TerminatorKind::SwitchInt { .. } => return,
             // We can recurse, no thing particular to do.
             TerminatorKind::Goto { .. } => None,
             // Flood the overwritten place, and progress through.
@@ -43,7 +43,7 @@ impl<'tcx> crate::MirPass<'tcx> for MatchBranchSimplification {
         }
 
         if should_cleanup {
-            simplify_cfg(body);
+            simplify_cfg(tcx, body);
         }
     }
 
@@ -8,7 +8,7 @@ pub(super) struct RemovePlaceMention;
 
 impl<'tcx> crate::MirPass<'tcx> for RemovePlaceMention {
     fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
-        !sess.opts.unstable_opts.mir_keep_place_mention
+        !sess.opts.unstable_opts.mir_preserve_ub
     }
 
     fn run_pass(&self, _: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
@@ -35,7 +35,7 @@ impl<'tcx> crate::MirPass<'tcx> for RemoveUnneededDrops {
         // if we applied optimizations, we potentially have some cfg to cleanup to
         // make it easier for further passes
         if should_simplify {
-            simplify_cfg(body);
+            simplify_cfg(tcx, body);
         }
     }
 
@@ -26,6 +26,13 @@
 //! Here the block (`{ return; }`) has the return type `char`, rather than `()`, but the MIR we
 //! naively generate still contains the `_a = ()` write in the unreachable block "after" the
 //! return.
+//!
+//! **WARNING**: This is one of the few optimizations that runs on built and analysis MIR, and
+//! so its effects may affect the type-checking, borrow-checking, and other analysis of MIR.
+//! We must be extremely careful to only apply optimizations that preserve UB and all
+//! non-determinism, since changes here can affect which programs compile in an insta-stable way.
+//! The normal logic that a program with UB can be changed to do anything does not apply to
+//! pre-"runtime" MIR!
 
 use rustc_index::{Idx, IndexSlice, IndexVec};
 use rustc_middle::mir::visit::{MutVisitor, MutatingUseContext, PlaceContext, Visitor};
@@ -66,8 +73,8 @@ impl SimplifyCfg {
     }
 }
 
-pub(super) fn simplify_cfg(body: &mut Body<'_>) {
-    CfgSimplifier::new(body).simplify();
+pub(super) fn simplify_cfg<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
+    CfgSimplifier::new(tcx, body).simplify();
     remove_dead_blocks(body);
 
     // FIXME: Should probably be moved into some kind of pass manager
@@ -79,9 +86,9 @@ impl<'tcx> crate::MirPass<'tcx> for SimplifyCfg {
         self.name()
     }
 
-    fn run_pass(&self, _: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
+    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
         debug!("SimplifyCfg({:?}) - simplifying {:?}", self.name(), body.source);
-        simplify_cfg(body);
+        simplify_cfg(tcx, body);
     }
 
     fn is_required(&self) -> bool {
@@ -90,12 +97,13 @@ impl<'tcx> crate::MirPass<'tcx> for SimplifyCfg {
 }
 
 struct CfgSimplifier<'a, 'tcx> {
+    preserve_switch_reads: bool,
     basic_blocks: &'a mut IndexSlice<BasicBlock, BasicBlockData<'tcx>>,
     pred_count: IndexVec<BasicBlock, u32>,
 }
 
 impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> {
-    fn new(body: &'a mut Body<'tcx>) -> Self {
+    fn new(tcx: TyCtxt<'tcx>, body: &'a mut Body<'tcx>) -> Self {
         let mut pred_count = IndexVec::from_elem(0u32, &body.basic_blocks);
 
         // we can't use mir.predecessors() here because that counts
@@ -110,9 +118,12 @@ impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> {
             }
         }
 
+        // Preserve `SwitchInt` reads on built and analysis MIR, or if `-Zmir-preserve-ub`.
+        let preserve_switch_reads = matches!(body.phase, MirPhase::Built | MirPhase::Analysis(_))
+            || tcx.sess.opts.unstable_opts.mir_preserve_ub;
         let basic_blocks = body.basic_blocks_mut();
 
-        CfgSimplifier { basic_blocks, pred_count }
+        CfgSimplifier { preserve_switch_reads, basic_blocks, pred_count }
     }
 
     fn simplify(mut self) {
@@ -253,9 +264,15 @@ impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> {
 
     // turn a branch with all successors identical to a goto
    fn simplify_branch(&mut self, terminator: &mut Terminator<'tcx>) -> bool {
-        match terminator.kind {
-            TerminatorKind::SwitchInt { .. } => {}
-            _ => return false,
+        // Removing a `SwitchInt` terminator may remove reads that result in UB,
+        // so we must not apply this optimization before borrowck or when
+        // `-Zmir-preserve-ub` is set.
+        if self.preserve_switch_reads {
+            return false;
+        }
+
+        let TerminatorKind::SwitchInt { .. } = terminator.kind else {
+            return false;
         };
 
         let first_succ = {
|
|
@ -288,6 +288,21 @@ where
    ) -> Vec<Candidate<I>>;
}

+/// Allows callers of `assemble_and_evaluate_candidates` to choose whether to limit
+/// candidate assembly to param-env and alias-bound candidates.
+///
+/// On top of being a micro-optimization, as it avoids doing unnecessary work when
+/// a param-env trait bound candidate shadows impls for normalization, this is also
+/// required to prevent query cycles due to RPITIT inference. See the issue at:
+/// <https://github.com/rust-lang/trait-system-refactor-initiative/issues/173>.
+pub(super) enum AssembleCandidatesFrom {
+    All,
+    /// Only assemble candidates from the environment and alias bounds, ignoring
+    /// user-written and built-in impls. We only expect `ParamEnv` and `AliasBound`
+    /// candidates to be assembled.
+    EnvAndBounds,
+}

impl<D, I> EvalCtxt<'_, D>
where
    D: SolverDelegate<Interner = I>,
@ -296,6 +311,7 @@ where
    pub(super) fn assemble_and_evaluate_candidates<G: GoalKind<D>>(
        &mut self,
        goal: Goal<I, G>,
+       assemble_from: AssembleCandidatesFrom,
    ) -> Vec<Candidate<I>> {
        let Ok(normalized_self_ty) =
            self.structurally_normalize_ty(goal.param_env, goal.predicate.self_ty())
@ -322,16 +338,18 @@ where
            }
        }

-       self.assemble_impl_candidates(goal, &mut candidates);
-
-       self.assemble_builtin_impl_candidates(goal, &mut candidates);
-
        self.assemble_alias_bound_candidates(goal, &mut candidates);

-       self.assemble_object_bound_candidates(goal, &mut candidates);
-
        self.assemble_param_env_candidates(goal, &mut candidates);

+       match assemble_from {
+           AssembleCandidatesFrom::All => {
+               self.assemble_impl_candidates(goal, &mut candidates);
+               self.assemble_builtin_impl_candidates(goal, &mut candidates);
+               self.assemble_object_bound_candidates(goal, &mut candidates);
+           }
+           AssembleCandidatesFrom::EnvAndBounds => {}
+       }

        candidates
    }
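The new `AssembleCandidatesFrom` enum lets the caller pick which candidate sources get assembled. A minimal standalone sketch of the same dispatch shape; the types and names are illustrative, not the solver's actual API:

    #[derive(Clone, Copy)]
    enum AssembleFrom {
        All,
        EnvAndBounds,
    }

    enum Candidate {
        ParamEnv,
        AliasBound,
        Impl,
    }

    fn assemble(from: AssembleFrom) -> Vec<Candidate> {
        // Environment and alias-bound candidates are always collected.
        let mut candidates = vec![Candidate::AliasBound, Candidate::ParamEnv];
        match from {
            // Impl-based candidates are only considered in the unrestricted mode.
            AssembleFrom::All => candidates.push(Candidate::Impl),
            AssembleFrom::EnvAndBounds => {}
        }
        candidates
    }

    fn main() {
        assert_eq!(assemble(AssembleFrom::All).len(), 3);
        assert_eq!(assemble(AssembleFrom::EnvAndBounds).len(), 2);
    }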
@ -754,6 +772,9 @@ where
        })
    }

+   /// Assemble and merge candidates for goals which are related to an underlying trait
+   /// goal. Right now, this is normalizes-to and host effect goals.
+   ///
    /// We sadly can't simply take all possible candidates for normalization goals
    /// and check whether they result in the same constraints. We want to make sure
    /// that trying to normalize an alias doesn't result in constraints which aren't
@ -782,47 +803,44 @@ where
    ///
    /// See trait-system-refactor-initiative#124 for more details.
    #[instrument(level = "debug", skip(self, inject_normalize_to_rigid_candidate), ret)]
-   pub(super) fn merge_candidates(
+   pub(super) fn assemble_and_merge_candidates<G: GoalKind<D>>(
        &mut self,
        proven_via: Option<TraitGoalProvenVia>,
-       candidates: Vec<Candidate<I>>,
+       goal: Goal<I, G>,
        inject_normalize_to_rigid_candidate: impl FnOnce(&mut EvalCtxt<'_, D>) -> QueryResult<I>,
    ) -> QueryResult<I> {
        let Some(proven_via) = proven_via else {
            // We don't care about overflow. If proving the trait goal overflowed, then
            // it's enough to report an overflow error for that, we don't also have to
            // overflow during normalization.
-           return Ok(self.make_ambiguous_response_no_constraints(MaybeCause::Ambiguity));
+           //
+           // We use `forced_ambiguity` here over `make_ambiguous_response_no_constraints`
+           // because the former will also record a built-in candidate in the inspector.
+           return self.forced_ambiguity(MaybeCause::Ambiguity).map(|cand| cand.result);
        };

        match proven_via {
            TraitGoalProvenVia::ParamEnv | TraitGoalProvenVia::AliasBound => {
-               let mut considered_candidates = Vec::new();
-               considered_candidates.extend(
-                   candidates
-                       .iter()
-                       .filter(|c| matches!(c.source, CandidateSource::ParamEnv(_)))
-                       .map(|c| c.result),
-               );
-
                // Even when a trait bound has been proven using a where-bound, we
                // still need to consider alias-bounds for normalization, see
-               // tests/ui/next-solver/alias-bound-shadowed-by-env.rs.
-               //
+               // `tests/ui/next-solver/alias-bound-shadowed-by-env.rs`.
+               let candidates_from_env_and_bounds: Vec<_> = self
+                   .assemble_and_evaluate_candidates(goal, AssembleCandidatesFrom::EnvAndBounds);

                // We still need to prefer where-bounds over alias-bounds however.
-               // See tests/ui/winnowing/norm-where-bound-gt-alias-bound.rs.
-               //
-               // FIXME(const_trait_impl): should this behavior also be used by
-               // constness checking. Doing so is *at least theoretically* breaking,
-               // see github.com/rust-lang/rust/issues/133044#issuecomment-2500709754
-               if considered_candidates.is_empty() {
-                   considered_candidates.extend(
-                       candidates
+               // See `tests/ui/winnowing/norm-where-bound-gt-alias-bound.rs`.
+               let mut considered_candidates: Vec<_> = if candidates_from_env_and_bounds
                    .iter()
-                       .filter(|c| matches!(c.source, CandidateSource::AliasBound))
-                       .map(|c| c.result),
-                   );
-               }
+                   .any(|c| matches!(c.source, CandidateSource::ParamEnv(_)))
+               {
+                   candidates_from_env_and_bounds
+                       .into_iter()
+                       .filter(|c| matches!(c.source, CandidateSource::ParamEnv(_)))
+                       .map(|c| c.result)
+                       .collect()
+               } else {
+                   candidates_from_env_and_bounds.into_iter().map(|c| c.result).collect()
+               };

                // If the trait goal has been proven by using the environment, we want to treat
                // aliases as rigid if there are no applicable projection bounds in the environment.
@ -839,6 +857,9 @@ where
                }
            }
            TraitGoalProvenVia::Misc => {
+               let candidates =
+                   self.assemble_and_evaluate_candidates(goal, AssembleCandidatesFrom::All);

                // Prefer "orphaned" param-env normalization predicates, which are used
                // (for example, and ideally only) when proving item bounds for an impl.
                let candidates_from_env: Vec<_> = candidates
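The rewritten `ParamEnv | AliasBound` arm prefers where-bound (param-env) results and only falls back to the full set when none exist. A small self-contained sketch of that preference rule, using stand-in types rather than the solver's own:

    #[derive(Clone, PartialEq)]
    enum Source {
        ParamEnv,
        AliasBound,
    }

    fn considered(candidates: Vec<(Source, &'static str)>) -> Vec<&'static str> {
        // Keep only param-env results when any exist, otherwise keep everything.
        if candidates.iter().any(|(s, _)| *s == Source::ParamEnv) {
            candidates
                .into_iter()
                .filter(|(s, _)| *s == Source::ParamEnv)
                .map(|(_, r)| r)
                .collect()
        } else {
            candidates.into_iter().map(|(_, r)| r).collect()
        }
    }

    fn main() {
        let both = vec![(Source::ParamEnv, "env"), (Source::AliasBound, "alias")];
        assert_eq!(considered(both), vec!["env"]);
        let only_alias = vec![(Source::AliasBound, "alias")];
        assert_eq!(considered(only_alias), vec!["alias"]);
    }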
@ -399,12 +399,11 @@ where
        &mut self,
        goal: Goal<I, ty::HostEffectPredicate<I>>,
    ) -> QueryResult<I> {
-       let candidates = self.assemble_and_evaluate_candidates(goal);
        let (_, proven_via) = self.probe(|_| ProbeKind::ShadowedEnvProbing).enter(|ecx| {
            let trait_goal: Goal<I, ty::TraitPredicate<I>> =
                goal.with(ecx.cx(), goal.predicate.trait_ref);
            ecx.compute_trait_goal(trait_goal)
        })?;
-       self.merge_candidates(proven_via, candidates, |_ecx| Err(NoSolution))
+       self.assemble_and_merge_candidates(proven_via, goal, |_ecx| Err(NoSolution))
    }
}
@ -32,14 +32,13 @@ where
        let cx = self.cx();
        match goal.predicate.alias.kind(cx) {
            ty::AliasTermKind::ProjectionTy | ty::AliasTermKind::ProjectionConst => {
-               let candidates = self.assemble_and_evaluate_candidates(goal);
                let trait_ref = goal.predicate.alias.trait_ref(cx);
                let (_, proven_via) =
                    self.probe(|_| ProbeKind::ShadowedEnvProbing).enter(|ecx| {
                        let trait_goal: Goal<I, ty::TraitPredicate<I>> = goal.with(cx, trait_ref);
                        ecx.compute_trait_goal(trait_goal)
                    })?;
-               self.merge_candidates(proven_via, candidates, |ecx| {
+               self.assemble_and_merge_candidates(proven_via, goal, |ecx| {
                    ecx.probe(|&result| ProbeKind::RigidAlias { result }).enter(|this| {
                        this.structurally_instantiate_normalizes_to_term(
                            goal,
@ -13,7 +13,7 @@ use tracing::{instrument, trace};

use crate::delegate::SolverDelegate;
use crate::solve::assembly::structural_traits::{self, AsyncCallableRelevantTypes};
-use crate::solve::assembly::{self, Candidate};
+use crate::solve::assembly::{self, AssembleCandidatesFrom, Candidate};
use crate::solve::inspect::ProbeKind;
use crate::solve::{
    BuiltinImplSource, CandidateSource, Certainty, EvalCtxt, Goal, GoalSource, MaybeCause,
@ -1365,7 +1365,7 @@ where
        &mut self,
        goal: Goal<I, TraitPredicate<I>>,
    ) -> Result<(CanonicalResponse<I>, Option<TraitGoalProvenVia>), NoSolution> {
-       let candidates = self.assemble_and_evaluate_candidates(goal);
+       let candidates = self.assemble_and_evaluate_candidates(goal, AssembleCandidatesFrom::All);
        self.merge_trait_candidates(goal, candidates)
    }
}
@ -1884,13 +1884,15 @@ impl<'a> Parser<'a> {
        let mut expr = self.parse_expr_opt()?;
        if let Some(expr) = &mut expr {
            if label.is_some()
-               && matches!(
-                   expr.kind,
+               && match &expr.kind {
                    ExprKind::While(_, _, None)
                    | ExprKind::ForLoop { label: None, .. }
-                   | ExprKind::Loop(_, None, _)
-                   | ExprKind::Block(_, None)
-               )
+                   | ExprKind::Loop(_, None, _) => true,
+                   ExprKind::Block(block, None) => {
+                       matches!(block.rules, BlockCheckMode::Default)
+                   }
+                   _ => false,
+               }
            {
                self.psess.buffer_lint(
                    BREAK_WITH_LABEL_AND_LOOP,
@ -194,12 +194,6 @@ pub fn check_attribute_safety(psess: &ParseSess, safety: AttributeSafety, attr:
            }
        }
    } else if let Safety::Unsafe(unsafe_span) = attr_item.unsafety {
-       // Allow (but don't require) `#[unsafe(naked)]` so that compiler-builtins can upgrade to it.
-       // FIXME(#139797): remove this special case when compiler-builtins has upgraded.
-       if attr.has_name(sym::naked) {
-           return;
-       }
-
        psess.dcx().emit_err(errors::InvalidAttrUnsafe {
            span: unsafe_span,
            name: attr_item.path.clone(),
@ -508,7 +508,7 @@ passes_must_use_no_effect =
    `#[must_use]` has no effect when applied to {$article} {$target}

passes_naked_asm_outside_naked_fn =
-    the `naked_asm!` macro can only be used in functions marked with `#[naked]`
+    the `naked_asm!` macro can only be used in functions marked with `#[unsafe(naked)]`

passes_naked_functions_asm_block =
    naked functions must contain a single `naked_asm!` invocation
@ -516,9 +516,9 @@ passes_naked_functions_asm_block =
    .label_non_asm = not allowed in naked functions

passes_naked_functions_incompatible_attribute =
-    attribute incompatible with `#[naked]`
-    .label = the `{$attr}` attribute is incompatible with `#[naked]`
-    .naked_attribute = function marked with `#[naked]` here
+    attribute incompatible with `#[unsafe(naked)]`
+    .label = the `{$attr}` attribute is incompatible with `#[unsafe(naked)]`
+    .naked_attribute = function marked with `#[unsafe(naked)]` here

passes_naked_functions_must_naked_asm =
    the `asm!` macro is not allowed in naked functions
@ -8,7 +8,7 @@ use std::sync::Arc;
use rustc_ast::expand::StrippedCfgItem;
use rustc_ast::{self as ast, Crate, NodeId, attr};
use rustc_ast_pretty::pprust;
-use rustc_attr_parsing::{AttributeKind, StabilityLevel, find_attr};
+use rustc_attr_parsing::StabilityLevel;
use rustc_data_structures::intern::Interned;
use rustc_errors::{Applicability, DiagCtxtHandle, StashKey};
use rustc_expand::base::{
@ -1128,13 +1128,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
            edition,
        );

-       // The #[rustc_macro_edition_2021] attribute is used by the pin!() macro
-       // as a temporary workaround for a regression in expressiveness in Rust 2024.
-       // See https://github.com/rust-lang/rust/issues/138718.
-       if find_attr!(attrs.iter(), AttributeKind::RustcMacroEdition2021) {
-           ext.edition = Edition::Edition2021;
-       }
-
        if let Some(builtin_name) = ext.builtin_name {
            // The macro was marked with `#[rustc_builtin_macro]`.
            if let Some(builtin_ext_kind) = self.builtin_macros.get(&builtin_name) {
@ -2322,12 +2322,12 @@ options! {
    mir_include_spans: MirIncludeSpans = (MirIncludeSpans::default(), parse_mir_include_spans, [UNTRACKED],
        "include extra comments in mir pretty printing, like line numbers and statement indices, \
        details about types, etc. (boolean for all passes, 'nll' to enable in NLL MIR only, default: 'nll')"),
-   mir_keep_place_mention: bool = (false, parse_bool, [TRACKED],
-       "keep place mention MIR statements, interpreted e.g., by miri; implies -Zmir-opt-level=0 \
-       (default: no)"),
    #[rustc_lint_opt_deny_field_access("use `Session::mir_opt_level` instead of this field")]
    mir_opt_level: Option<usize> = (None, parse_opt_number, [TRACKED],
        "MIR optimization level (0-4; default: 1 in non optimized builds and 2 in optimized builds)"),
+   mir_preserve_ub: bool = (false, parse_bool, [TRACKED],
+       "keep place mention statements and reads in trivial SwitchInt terminators, which are interpreted \
+       e.g., by miri; implies -Zmir-opt-level=0 (default: no)"),
    mir_strip_debuginfo: MirStripDebugInfo = (MirStripDebugInfo::None, parse_mir_strip_debuginfo, [TRACKED],
        "Whether to remove some of the MIR debug info from methods. Default: None"),
    move_size_limit: Option<usize> = (None, parse_opt_number, [TRACKED],
@ -1232,6 +1232,25 @@ impl DesugaringKind {
            DesugaringKind::PatTyRange => "pattern type",
        }
    }

+   /// For use with `rustc_unimplemented` to support conditions
+   /// like `from_desugaring = "QuestionMark"`
+   pub fn matches(&self, value: &str) -> bool {
+       match self {
+           DesugaringKind::CondTemporary => value == "CondTemporary",
+           DesugaringKind::Async => value == "Async",
+           DesugaringKind::Await => value == "Await",
+           DesugaringKind::QuestionMark => value == "QuestionMark",
+           DesugaringKind::TryBlock => value == "TryBlock",
+           DesugaringKind::YeetExpr => value == "YeetExpr",
+           DesugaringKind::OpaqueTy => value == "OpaqueTy",
+           DesugaringKind::ForLoop => value == "ForLoop",
+           DesugaringKind::WhileLoop => value == "WhileLoop",
+           DesugaringKind::BoundModifier => value == "BoundModifier",
+           DesugaringKind::Contract => value == "Contract",
+           DesugaringKind::PatTyRange => value == "PatTyRange",
+       }
+   }
}

#[derive(Default)]
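The added `matches` helper compares a desugaring against a condition string such as `from_desugaring = "QuestionMark"`. A standalone sketch of how such a check could be driven; the enum here is a stand-in, not the real `DesugaringKind`:

    #[derive(Clone, Copy)]
    enum Desugaring {
        QuestionMark,
        ForLoop,
    }

    impl Desugaring {
        fn matches(&self, value: &str) -> bool {
            match self {
                Desugaring::QuestionMark => value == "QuestionMark",
                Desugaring::ForLoop => value == "ForLoop",
            }
        }
    }

    fn condition_holds(actual: Option<Desugaring>, wanted: &str) -> bool {
        // A condition like `from_desugaring = "QuestionMark"` only holds when the
        // obligation's span actually comes from that desugaring.
        actual.is_some_and(|d| d.matches(wanted))
    }

    fn main() {
        assert!(condition_holds(Some(Desugaring::QuestionMark), "QuestionMark"));
        assert!(!condition_holds(Some(Desugaring::ForLoop), "QuestionMark"));
        assert!(!condition_holds(None, "QuestionMark"));
    }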
@ -372,6 +372,7 @@ symbols! {
        SyncUnsafeCell,
        T,
        Target,
+       This,
        ToOwned,
        ToString,
        TokenStream,
@ -1823,7 +1824,6 @@ symbols! {
        rustc_lint_opt_ty,
        rustc_lint_query_instability,
        rustc_lint_untracked_query_information,
-       rustc_macro_edition_2021,
        rustc_macro_transparency,
        rustc_main,
        rustc_mir,
@ -1,12 +1,11 @@
use crate::spec::{LinkSelfContainedDefault, TargetOptions, base, crt_objects};

pub(crate) fn opts() -> TargetOptions {
-   let mut base = base::linux::opts();
-   base.env = "musl".into();
-   base.pre_link_objects_self_contained = crt_objects::pre_musl_self_contained();
-   base.post_link_objects_self_contained = crt_objects::post_musl_self_contained();
-   base.link_self_contained = LinkSelfContainedDefault::InferredForMusl;
-   base
+   TargetOptions {
+       env: "musl".into(),
+       pre_link_objects_self_contained: crt_objects::pre_musl_self_contained(),
+       post_link_objects_self_contained: crt_objects::post_musl_self_contained(),
+       link_self_contained: LinkSelfContainedDefault::InferredForMusl,
+       ..base::linux::opts()
+   }
}
@ -1,12 +1,11 @@
use crate::spec::{TargetOptions, TlsModel, base};

pub(crate) fn opts() -> TargetOptions {
-   let mut base = base::linux::opts();
-   base.env = "ohos".into();
-   base.crt_static_default = false;
-   base.tls_model = TlsModel::Emulated;
-   base.has_thread_local = false;
-   base
+   TargetOptions {
+       env: "ohos".into(),
+       crt_static_default: false,
+       tls_model: TlsModel::Emulated,
+       has_thread_local: false,
+       ..base::linux::opts()
+   }
}
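Both target hunks above swap field-by-field mutation of a base value for struct update syntax. A minimal illustration of the two styles with a hypothetical options type (not the real `TargetOptions`):

    struct Opts {
        env: String,
        crt_static_default: bool,
        has_thread_local: bool,
    }

    fn linux_defaults() -> Opts {
        Opts { env: String::new(), crt_static_default: true, has_thread_local: true }
    }

    // Mutation style: start from the defaults and overwrite fields.
    fn ohos_mutating() -> Opts {
        let mut base = linux_defaults();
        base.env = "ohos".into();
        base.crt_static_default = false;
        base
    }

    // Struct update style: name the overrides, take the rest from the defaults.
    fn ohos_update() -> Opts {
        Opts { env: "ohos".into(), crt_static_default: false, ..linux_defaults() }
    }

    fn main() {
        assert_eq!(ohos_mutating().env, ohos_update().env);
        assert_eq!(ohos_mutating().crt_static_default, ohos_update().crt_static_default);
        assert_eq!(ohos_mutating().has_thread_local, ohos_update().has_thread_local);
    }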
@ -7,6 +7,12 @@ pub(crate) fn target() -> Target {
    base.cpu = "pentium4".into();
    base.max_atomic_width = Some(64);
    base.supported_sanitizers = SanitizerSet::ADDRESS;
+   // On Windows 7 32-bit, the alignment characteristic of the TLS Directory
+   // don't appear to be respected by the PE Loader, leading to crashes. As
+   // a result, let's disable has_thread_local to make sure TLS goes through
+   // the emulation layer.
+   // See https://github.com/rust-lang/rust/issues/138903
+   base.has_thread_local = false;

    base.add_pre_link_args(
        LinkerFlavor::Msvc(Lld::No),
@ -2,6 +2,8 @@ pub mod ambiguity;
pub mod call_kind;
mod fulfillment_errors;
pub mod on_unimplemented;
+pub mod on_unimplemented_condition;
+pub mod on_unimplemented_format;
mod overflow;
pub mod suggestions;
@ -1,44 +1,31 @@
use std::iter;
use std::path::PathBuf;

-use rustc_ast::MetaItemInner;
-use rustc_data_structures::fx::FxHashMap;
+use rustc_ast::{LitKind, MetaItem, MetaItemInner, MetaItemKind, MetaItemLit};
use rustc_errors::codes::*;
use rustc_errors::{ErrorGuaranteed, struct_span_code_err};
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::{AttrArgs, Attribute};
use rustc_macros::LintDiagnostic;
use rustc_middle::bug;
-use rustc_middle::ty::print::PrintTraitRefExt as _;
-use rustc_middle::ty::{self, GenericArgsRef, GenericParamDefKind, TyCtxt};
-use rustc_parse_format::{ParseMode, Parser, Piece, Position};
+use rustc_middle::ty::print::PrintTraitRefExt;
+use rustc_middle::ty::{self, GenericArgsRef, GenericParamDef, GenericParamDefKind, TyCtxt};
use rustc_session::lint::builtin::UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES;
-use rustc_span::{Ident, Span, Symbol, kw, sym};
+use rustc_span::{Span, Symbol, sym};
use tracing::{debug, info};
use {rustc_attr_parsing as attr, rustc_hir as hir};

use super::{ObligationCauseCode, PredicateObligation};
use crate::error_reporting::TypeErrCtxt;
+use crate::error_reporting::traits::on_unimplemented_condition::{Condition, ConditionOptions};
+use crate::error_reporting::traits::on_unimplemented_format::{
+    Ctx, FormatArgs, FormatString, FormatWarning,
+};
use crate::errors::{
    EmptyOnClauseInOnUnimplemented, InvalidOnClauseInOnUnimplemented, NoValueInOnUnimplemented,
};
use crate::infer::InferCtxtExt;

-/// The symbols which are always allowed in a format string
-static ALLOWED_FORMAT_SYMBOLS: &[Symbol] = &[
-    kw::SelfUpper,
-    sym::ItemContext,
-    sym::from_desugaring,
-    sym::direct,
-    sym::cause,
-    sym::integral,
-    sym::integer_,
-    sym::float,
-    sym::_Self,
-    sym::crate_local,
-    sym::Trait,
-];
-
impl<'tcx> TypeErrCtxt<'_, 'tcx> {
    fn impl_similar_to(
        &self,
@ -121,86 +108,78 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
            .unwrap_or_else(|| (trait_pred.def_id(), trait_pred.skip_binder().trait_ref.args));
        let trait_pred = trait_pred.skip_binder();

-       let mut flags = vec![];
+       let mut self_types = vec![];
+       let mut generic_args: Vec<(Symbol, String)> = vec![];
+       let mut crate_local = false;
        // FIXME(-Zlower-impl-trait-in-trait-to-assoc-ty): HIR is not present for RPITITs,
        // but I guess we could synthesize one here. We don't see any errors that rely on
        // that yet, though.
-       let enclosure = self.describe_enclosure(obligation.cause.body_id).map(|t| t.to_owned());
-       flags.push((sym::ItemContext, enclosure));
+       let item_context = self.describe_enclosure(obligation.cause.body_id).unwrap_or("");

-       match obligation.cause.code() {
+       let direct = match obligation.cause.code() {
            ObligationCauseCode::BuiltinDerived(..)
            | ObligationCauseCode::ImplDerived(..)
-           | ObligationCauseCode::WellFormedDerived(..) => {}
+           | ObligationCauseCode::WellFormedDerived(..) => false,
            _ => {
                // this is a "direct", user-specified, rather than derived,
                // obligation.
-               flags.push((sym::direct, None));
+               true
            }
-       }
+       };

-       if let Some(k) = obligation.cause.span.desugaring_kind() {
-           flags.push((sym::from_desugaring, None));
-           flags.push((sym::from_desugaring, Some(format!("{k:?}"))));
-       }
+       let from_desugaring = obligation.cause.span.desugaring_kind();

-       if let ObligationCauseCode::MainFunctionType = obligation.cause.code() {
-           flags.push((sym::cause, Some("MainFunctionType".to_string())));
-       }
-       flags.push((sym::Trait, Some(trait_pred.trait_ref.print_trait_sugared().to_string())));
+       let cause = if let ObligationCauseCode::MainFunctionType = obligation.cause.code() {
+           Some("MainFunctionType".to_string())
+       } else {
+           None
+       };

        // Add all types without trimmed paths or visible paths, ensuring they end up with
        // their "canonical" def path.
        ty::print::with_no_trimmed_paths!(ty::print::with_no_visible_paths!({
            let generics = self.tcx.generics_of(def_id);
            let self_ty = trait_pred.self_ty();
-           // This is also included through the generics list as `Self`,
-           // but the parser won't allow you to use it
-           flags.push((sym::_Self, Some(self_ty.to_string())));
+           self_types.push(self_ty.to_string());
            if let Some(def) = self_ty.ty_adt_def() {
                // We also want to be able to select self's original
                // signature with no type arguments resolved
-               flags.push((
-                   sym::_Self,
-                   Some(self.tcx.type_of(def.did()).instantiate_identity().to_string()),
-               ));
+               self_types.push(self.tcx.type_of(def.did()).instantiate_identity().to_string());
            }

-           for param in generics.own_params.iter() {
-               let value = match param.kind {
+           for GenericParamDef { name, kind, index, .. } in generics.own_params.iter() {
+               let value = match kind {
                    GenericParamDefKind::Type { .. } | GenericParamDefKind::Const { .. } => {
-                       args[param.index as usize].to_string()
+                       args[*index as usize].to_string()
                    }
                    GenericParamDefKind::Lifetime => continue,
                };
-               let name = param.name;
-               flags.push((name, Some(value)));
+               generic_args.push((*name, value));

-               if let GenericParamDefKind::Type { .. } = param.kind {
-                   let param_ty = args[param.index as usize].expect_ty();
+               if let GenericParamDefKind::Type { .. } = kind {
+                   let param_ty = args[*index as usize].expect_ty();
                    if let Some(def) = param_ty.ty_adt_def() {
                        // We also want to be able to select the parameter's
                        // original signature with no type arguments resolved
-                       flags.push((
-                           name,
-                           Some(self.tcx.type_of(def.did()).instantiate_identity().to_string()),
+                       generic_args.push((
+                           *name,
+                           self.tcx.type_of(def.did()).instantiate_identity().to_string(),
                        ));
                    }
                }
            }

            if let Some(true) = self_ty.ty_adt_def().map(|def| def.did().is_local()) {
-               flags.push((sym::crate_local, None));
+               crate_local = true;
            }

            // Allow targeting all integers using `{integral}`, even if the exact type was resolved
            if self_ty.is_integral() {
-               flags.push((sym::_Self, Some("{integral}".to_owned())));
+               self_types.push("{integral}".to_owned());
            }

            if self_ty.is_array_slice() {
-               flags.push((sym::_Self, Some("&[]".to_owned())));
+               self_types.push("&[]".to_owned());
            }

            if self_ty.is_fn() {
@ -215,53 +194,51 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                    hir::Safety::Unsafe => "unsafe fn",
                }
            };
-           flags.push((sym::_Self, Some(shortname.to_owned())));
+           self_types.push(shortname.to_owned());
        }

        // Slices give us `[]`, `[{ty}]`
        if let ty::Slice(aty) = self_ty.kind() {
-           flags.push((sym::_Self, Some("[]".to_string())));
+           self_types.push("[]".to_owned());
            if let Some(def) = aty.ty_adt_def() {
                // We also want to be able to select the slice's type's original
                // signature with no type arguments resolved
-               flags.push((
-                   sym::_Self,
-                   Some(format!("[{}]", self.tcx.type_of(def.did()).instantiate_identity())),
-               ));
+               self_types
+                   .push(format!("[{}]", self.tcx.type_of(def.did()).instantiate_identity()));
            }
            if aty.is_integral() {
-               flags.push((sym::_Self, Some("[{integral}]".to_string())));
+               self_types.push("[{integral}]".to_string());
            }
        }

        // Arrays give us `[]`, `[{ty}; _]` and `[{ty}; N]`
        if let ty::Array(aty, len) = self_ty.kind() {
-           flags.push((sym::_Self, Some("[]".to_string())));
+           self_types.push("[]".to_string());
            let len = len.try_to_target_usize(self.tcx);
-           flags.push((sym::_Self, Some(format!("[{aty}; _]"))));
+           self_types.push(format!("[{aty}; _]"));
            if let Some(n) = len {
-               flags.push((sym::_Self, Some(format!("[{aty}; {n}]"))));
+               self_types.push(format!("[{aty}; {n}]"));
            }
            if let Some(def) = aty.ty_adt_def() {
                // We also want to be able to select the array's type's original
                // signature with no type arguments resolved
                let def_ty = self.tcx.type_of(def.did()).instantiate_identity();
-               flags.push((sym::_Self, Some(format!("[{def_ty}; _]"))));
+               self_types.push(format!("[{def_ty}; _]"));
                if let Some(n) = len {
-                   flags.push((sym::_Self, Some(format!("[{def_ty}; {n}]"))));
+                   self_types.push(format!("[{def_ty}; {n}]"));
                }
            }
            if aty.is_integral() {
-               flags.push((sym::_Self, Some("[{integral}; _]".to_string())));
+               self_types.push("[{integral}; _]".to_string());
                if let Some(n) = len {
-                   flags.push((sym::_Self, Some(format!("[{{integral}}; {n}]"))));
+                   self_types.push(format!("[{{integral}}; {n}]"));
                }
            }
        }
        if let ty::Dynamic(traits, _, _) = self_ty.kind() {
            for t in traits.iter() {
                if let ty::ExistentialPredicate::Trait(trait_ref) = t.skip_binder() {
-                   flags.push((sym::_Self, Some(self.tcx.def_path_str(trait_ref.def_id))))
+                   self_types.push(self.tcx.def_path_str(trait_ref.def_id));
                }
            }
        }
@ -271,31 +248,76 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
            && let ty::Slice(sty) = ref_ty.kind()
            && sty.is_integral()
        {
-           flags.push((sym::_Self, Some("&[{integral}]".to_owned())));
+           self_types.push("&[{integral}]".to_owned());
        }
    }));

+       let this = self.tcx.def_path_str(trait_pred.trait_ref.def_id);
+       let trait_sugared = trait_pred.trait_ref.print_trait_sugared();
+
+       let condition_options = ConditionOptions {
+           self_types,
+           from_desugaring,
+           cause,
+           crate_local,
+           direct,
+           generic_args,
+       };
+
+       // Unlike the generic_args earlier,
+       // this one is *not* collected under `with_no_trimmed_paths!`
+       // for printing the type to the user
+       //
+       // This includes `Self`, as it is the first parameter in `own_params`.
+       let generic_args = self
+           .tcx
+           .generics_of(trait_pred.trait_ref.def_id)
+           .own_params
+           .iter()
+           .filter_map(|param| {
+               let value = match param.kind {
+                   GenericParamDefKind::Type { .. } | GenericParamDefKind::Const { .. } => {
+                       if let Some(ty) = trait_pred.trait_ref.args[param.index as usize].as_type()
+                       {
+                           self.tcx.short_string(ty, long_ty_file)
+                       } else {
+                           trait_pred.trait_ref.args[param.index as usize].to_string()
+                       }
+                   }
+                   GenericParamDefKind::Lifetime => return None,
+               };
+               let name = param.name;
+               Some((name, value))
+           })
+           .collect();
+
+       let format_args = FormatArgs { this, trait_sugared, generic_args, item_context };
+
        if let Ok(Some(command)) = OnUnimplementedDirective::of_item(self.tcx, def_id) {
-           command.evaluate(self.tcx, trait_pred.trait_ref, &flags, long_ty_file)
+           command.evaluate(self.tcx, trait_pred.trait_ref, &condition_options, &format_args)
        } else {
            OnUnimplementedNote::default()
        }
    }
}

+/// Represents a format string in a on_unimplemented attribute,
+/// like the "content" in `#[diagnostic::on_unimplemented(message = "content")]`
#[derive(Clone, Debug)]
pub struct OnUnimplementedFormatString {
-   symbol: Symbol,
-   span: Span,
-   is_diagnostic_namespace_variant: bool,
+   /// Symbol of the format string, i.e. `"content"`
+   pub symbol: Symbol,
+   ///The span of the format string, i.e. `"content"`
+   pub span: Span,
+   pub is_diagnostic_namespace_variant: bool,
}

#[derive(Debug)]
pub struct OnUnimplementedDirective {
-   pub condition: Option<MetaItemInner>,
+   pub condition: Option<Condition>,
    pub subcommands: Vec<OnUnimplementedDirective>,
-   pub message: Option<OnUnimplementedFormatString>,
-   pub label: Option<OnUnimplementedFormatString>,
+   pub message: Option<(Span, OnUnimplementedFormatString)>,
+   pub label: Option<(Span, OnUnimplementedFormatString)>,
    pub notes: Vec<OnUnimplementedFormatString>,
    pub parent_label: Option<OnUnimplementedFormatString>,
    pub append_const_msg: Option<AppendConstMessage>,
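The new doc comment refers to `#[diagnostic::on_unimplemented(message = "content")]`. A small self-contained example of that stable diagnostic-namespace attribute on a made-up trait; the trait name and messages are illustrative:

    #[diagnostic::on_unimplemented(
        message = "`{Self}` cannot be framed",
        label = "not framable",
        note = "implement `Frame` for `{Self}` or wrap it in a supported container"
    )]
    trait Frame {
        fn frame(&self) -> String;
    }

    fn show<T: Frame>(value: T) -> String {
        value.frame()
    }

    fn main() {
        struct Png;
        impl Frame for Png {
            fn frame(&self) -> String {
                "framed".to_string()
            }
        }
        println!("{}", show(Png));
        // Calling `show(3_u8)` would surface the customized error message above.
    }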
@ -329,7 +351,7 @@ pub struct MalformedOnUnimplementedAttrLint {
}

impl MalformedOnUnimplementedAttrLint {
-   fn new(span: Span) -> Self {
+   pub fn new(span: Span) -> Self {
        Self { span }
    }
}
@ -350,7 +372,7 @@ pub struct IgnoredDiagnosticOption {
}

impl IgnoredDiagnosticOption {
-   fn maybe_emit_warning<'tcx>(
+   pub fn maybe_emit_warning<'tcx>(
        tcx: TyCtxt<'tcx>,
        item_def_id: DefId,
        new: Option<Span>,
@ -370,29 +392,11 @@ impl IgnoredDiagnosticOption {
        }
    }
}

-#[derive(LintDiagnostic)]
-#[diag(trait_selection_unknown_format_parameter_for_on_unimplemented_attr)]
-#[help]
-pub struct UnknownFormatParameterForOnUnimplementedAttr {
-    argument_name: Symbol,
-    trait_name: Ident,
-}
-
-#[derive(LintDiagnostic)]
-#[diag(trait_selection_disallowed_positional_argument)]
-#[help]
-pub struct DisallowedPositionalArgument;
-
-#[derive(LintDiagnostic)]
-#[diag(trait_selection_invalid_format_specifier)]
-#[help]
-pub struct InvalidFormatSpecifier;
-
#[derive(LintDiagnostic)]
#[diag(trait_selection_wrapped_parser_error)]
pub struct WrappedParserError {
-   description: String,
-   label: String,
+   pub description: String,
+   pub label: String,
}

impl<'tcx> OnUnimplementedDirective {
@ -407,12 +411,12 @@ impl<'tcx> OnUnimplementedDirective {
        let mut errored = None;
        let mut item_iter = items.iter();

-       let parse_value = |value_str, value_span| {
+       let parse_value = |value_str, span| {
            OnUnimplementedFormatString::try_parse(
                tcx,
                item_def_id,
                value_str,
-               value_span,
+               span,
                is_diagnostic_namespace_variant,
            )
            .map(Some)
@ -434,7 +438,7 @@ impl<'tcx> OnUnimplementedDirective {
                }
                true
            });
-           Some(cond.clone())
+           Some(Condition { inner: cond.clone() })
        };

        let mut message = None;
@ -444,24 +448,36 @@ impl<'tcx> OnUnimplementedDirective {
        let mut subcommands = vec![];
        let mut append_const_msg = None;

+       let get_value_and_span = |item: &_, key| {
+           if let MetaItemInner::MetaItem(MetaItem {
+               path,
+               kind: MetaItemKind::NameValue(MetaItemLit { span, kind: LitKind::Str(s, _), .. }),
+               ..
+           }) = item
+               && *path == key
+           {
+               Some((*s, *span))
+           } else {
+               None
+           }
+       };
+
        for item in item_iter {
-           if item.has_name(sym::message) && message.is_none() {
-               if let Some(message_) = item.value_str() {
-                   message = parse_value(message_, item.span())?;
+           if let Some((message_, span)) = get_value_and_span(item, sym::message)
+               && message.is_none()
+           {
+               message = parse_value(message_, span)?.map(|l| (item.span(), l));
                continue;
-               }
-           } else if item.has_name(sym::label) && label.is_none() {
-               if let Some(label_) = item.value_str() {
-                   label = parse_value(label_, item.span())?;
+           } else if let Some((label_, span)) = get_value_and_span(item, sym::label)
+               && label.is_none()
+           {
+               label = parse_value(label_, span)?.map(|l| (item.span(), l));
                continue;
-               }
-           } else if item.has_name(sym::note) {
-               if let Some(note_) = item.value_str() {
-                   if let Some(note) = parse_value(note_, item.span())? {
+           } else if let Some((note_, span)) = get_value_and_span(item, sym::note) {
+               if let Some(note) = parse_value(note_, span)? {
                    notes.push(note);
                    continue;
                }
-           }
            } else if item.has_name(sym::parent_label)
                && parent_label.is_none()
                && !is_diagnostic_namespace_variant
@ -539,6 +555,13 @@ impl<'tcx> OnUnimplementedDirective {
    }

    pub fn of_item(tcx: TyCtxt<'tcx>, item_def_id: DefId) -> Result<Option<Self>, ErrorGuaranteed> {
+       if !tcx.is_trait(item_def_id) {
+           // It could be a trait_alias (`trait MyTrait = SomeOtherTrait`)
+           // or an implementation (`impl MyTrait for Foo {}`)
+           //
+           // We don't support those.
+           return Ok(None);
+       }
        if let Some(attr) = tcx.get_attr(item_def_id, sym::rustc_on_unimplemented) {
            return Self::parse_attribute(attr, false, tcx, item_def_id);
        } else {
@ -554,15 +577,15 @@ impl<'tcx> OnUnimplementedDirective {
            IgnoredDiagnosticOption::maybe_emit_warning(
                tcx,
                item_def_id,
-               directive.message.as_ref().map(|f| f.span),
-               aggr.message.as_ref().map(|f| f.span),
+               directive.message.as_ref().map(|f| f.0),
+               aggr.message.as_ref().map(|f| f.0),
                "message",
            );
            IgnoredDiagnosticOption::maybe_emit_warning(
                tcx,
                item_def_id,
-               directive.label.as_ref().map(|f| f.span),
-               aggr.label.as_ref().map(|f| f.span),
+               directive.label.as_ref().map(|f| f.0),
+               aggr.label.as_ref().map(|f| f.0),
                "label",
            );
            IgnoredDiagnosticOption::maybe_emit_warning(
@ -636,13 +659,16 @@ impl<'tcx> OnUnimplementedDirective {
                condition: None,
                message: None,
                subcommands: vec![],
-               label: Some(OnUnimplementedFormatString::try_parse(
+               label: Some((
+                   attr.span(),
+                   OnUnimplementedFormatString::try_parse(
                    tcx,
                    item_def_id,
                    value,
-                   attr.span(),
+                   attr.value_span().unwrap_or(attr.span()),
                    is_diagnostic_namespace_variant,
-               )?),
+                   )?,
+               )),
                notes: Vec::new(),
                parent_label: None,
                append_const_msg: None,
@ -702,43 +728,23 @@ impl<'tcx> OnUnimplementedDirective {
        &self,
        tcx: TyCtxt<'tcx>,
        trait_ref: ty::TraitRef<'tcx>,
-       options: &[(Symbol, Option<String>)],
-       long_ty_file: &mut Option<PathBuf>,
+       condition_options: &ConditionOptions,
+       args: &FormatArgs<'tcx>,
    ) -> OnUnimplementedNote {
        let mut message = None;
        let mut label = None;
        let mut notes = Vec::new();
        let mut parent_label = None;
        let mut append_const_msg = None;
-       info!("evaluate({:?}, trait_ref={:?}, options={:?})", self, trait_ref, options);
-       let options_map: FxHashMap<Symbol, String> =
-           options.iter().filter_map(|(k, v)| v.clone().map(|v| (*k, v))).collect();
+       info!(
+           "evaluate({:?}, trait_ref={:?}, options={:?}, args ={:?})",
+           self, trait_ref, condition_options, args
+       );

        for command in self.subcommands.iter().chain(Some(self)).rev() {
            debug!(?command);
            if let Some(ref condition) = command.condition
-               && !attr::eval_condition(condition, &tcx.sess, Some(tcx.features()), &mut |cfg| {
-                   let value = cfg.value.map(|v| {
-                       // `with_no_visible_paths` is also used when generating the options,
-                       // so we need to match it here.
-                       ty::print::with_no_visible_paths!(
-                           OnUnimplementedFormatString {
-                               symbol: v,
-                               span: cfg.span,
-                               is_diagnostic_namespace_variant: false
-                           }
-                           .format(
-                               tcx,
-                               trait_ref,
-                               &options_map,
-                               long_ty_file
-                           )
-                       )
-                   });
-
-                   options.contains(&(cfg.name, value))
-               })
+               && !condition.matches_predicate(tcx, condition_options)
            {
                debug!("evaluate: skipping {:?} due to condition", command);
                continue;
@ -762,14 +768,10 @@ impl<'tcx> OnUnimplementedDirective {
        }

        OnUnimplementedNote {
-           label: label.map(|l| l.format(tcx, trait_ref, &options_map, long_ty_file)),
-           message: message.map(|m| m.format(tcx, trait_ref, &options_map, long_ty_file)),
-           notes: notes
-               .into_iter()
-               .map(|n| n.format(tcx, trait_ref, &options_map, long_ty_file))
-               .collect(),
-           parent_label: parent_label
-               .map(|e_s| e_s.format(tcx, trait_ref, &options_map, long_ty_file)),
+           label: label.map(|l| l.1.format(tcx, trait_ref, args)),
+           message: message.map(|m| m.1.format(tcx, trait_ref, args)),
+           notes: notes.into_iter().map(|n| n.format(tcx, trait_ref, args)).collect(),
+           parent_label: parent_label.map(|e_s| e_s.format(tcx, trait_ref, args)),
            append_const_msg,
        }
    }
@ -780,106 +782,81 @@ impl<'tcx> OnUnimplementedFormatString {
        tcx: TyCtxt<'tcx>,
        item_def_id: DefId,
        from: Symbol,
-       value_span: Span,
+       span: Span,
        is_diagnostic_namespace_variant: bool,
    ) -> Result<Self, ErrorGuaranteed> {
-       let result = OnUnimplementedFormatString {
-           symbol: from,
-           span: value_span,
-           is_diagnostic_namespace_variant,
-       };
+       let result =
+           OnUnimplementedFormatString { symbol: from, span, is_diagnostic_namespace_variant };
        result.verify(tcx, item_def_id)?;
        Ok(result)
    }

-   fn verify(&self, tcx: TyCtxt<'tcx>, item_def_id: DefId) -> Result<(), ErrorGuaranteed> {
-       let trait_def_id = if tcx.is_trait(item_def_id) {
-           item_def_id
-       } else {
-           tcx.trait_id_of_impl(item_def_id)
-               .expect("expected `on_unimplemented` to correspond to a trait")
+   fn verify(&self, tcx: TyCtxt<'tcx>, trait_def_id: DefId) -> Result<(), ErrorGuaranteed> {
+       if !tcx.is_trait(trait_def_id) {
+           return Ok(());
        };
-       let trait_name = tcx.item_ident(trait_def_id);
-       let generics = tcx.generics_of(item_def_id);
-       let s = self.symbol.as_str();
-       let mut parser = Parser::new(s, None, None, false, ParseMode::Format);
+       let ctx = if self.is_diagnostic_namespace_variant {
+           Ctx::DiagnosticOnUnimplemented { tcx, trait_def_id }
+       } else {
+           Ctx::RustcOnUnimplemented { tcx, trait_def_id }
+       };

        let mut result = Ok(());
-       for token in &mut parser {
-           match token {
-               Piece::Lit(_) => (), // Normal string, no need to check it
-               Piece::NextArgument(a) => {
-                   let format_spec = a.format;
-                   if self.is_diagnostic_namespace_variant
-                       && (format_spec.ty_span.is_some()
-                           || format_spec.width_span.is_some()
-                           || format_spec.precision_span.is_some()
-                           || format_spec.fill_span.is_some())
-                   {
-                       if let Some(item_def_id) = item_def_id.as_local() {
-                           tcx.emit_node_span_lint(
-                               UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES,
-                               tcx.local_def_id_to_hir_id(item_def_id),
-                               self.span,
-                               InvalidFormatSpecifier,
-                           );
-                       }
-                   }
-                   match a.position {
-                       Position::ArgumentNamed(s) => {
-                           match Symbol::intern(s) {
-                               // `{ThisTraitsName}` is allowed
-                               s if s == trait_name.name
-                                   && !self.is_diagnostic_namespace_variant =>
-                               {
-                                   ()
-                               }
-                               s if ALLOWED_FORMAT_SYMBOLS.contains(&s)
-                                   && !self.is_diagnostic_namespace_variant =>
-                               {
-                                   ()
-                               }
-                               // So is `{A}` if A is a type parameter
-                               s if generics.own_params.iter().any(|param| param.name == s) => (),
-                               s => {
+
+       match FormatString::parse(self.symbol, self.span, &ctx) {
+           // Warnings about format specifiers, deprecated parameters, wrong parameters etc.
+           // In other words we'd like to let the author know, but we can still try to format the string later
+           Ok(FormatString { warnings, .. }) => {
                if self.is_diagnostic_namespace_variant {
-                   if let Some(item_def_id) = item_def_id.as_local() {
-                       tcx.emit_node_span_lint(
-                           UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES,
-                           tcx.local_def_id_to_hir_id(item_def_id),
-                           self.span,
-                           UnknownFormatParameterForOnUnimplementedAttr {
-                               argument_name: s,
-                               trait_name,
-                           },
-                       );
+                   for w in warnings {
+                       w.emit_warning(tcx, trait_def_id)
                    }
                } else {
-                   result = Err(struct_span_code_err!(
+                   for w in warnings {
+                       match w {
+                           FormatWarning::UnknownParam { argument_name, span } => {
+                               let reported = struct_span_code_err!(
                                    tcx.dcx(),
-                                   self.span,
+                                   span,
                                    E0230,
-                                   "there is no parameter `{}` on {}",
-                                   s,
-                                   if trait_def_id == item_def_id {
-                                       format!("trait `{trait_name}`")
-                                   } else {
-                                       "impl".to_string()
-                                   }
+                                   "cannot find parameter {} on this trait",
+                                   argument_name,
                                )
-                   .emit());
+                               .emit();
+                               result = Err(reported);
+                           }
+                           FormatWarning::PositionalArgument { span, .. } => {
+                               let reported = struct_span_code_err!(
+                                   tcx.dcx(),
+                                   span,
+                                   E0231,
+                                   "positional format arguments are not allowed here"
+                               )
+                               .emit();
+                               result = Err(reported);
+                           }
+                           FormatWarning::InvalidSpecifier { .. }
+                           | FormatWarning::FutureIncompat { .. } => {}
                        }
                    }
                }
            }
-           // `{:1}` and `{}` are not to be used
-           Position::ArgumentIs(..) | Position::ArgumentImplicitlyIs(_) => {
+           // Errors from the underlying `rustc_parse_format::Parser`
+           Err(errors) => {
+               // we cannot return errors from processing the format string as hard error here
+               // as the diagnostic namespace guarantees that malformed input cannot cause an error
+               //
+               // if we encounter any error while processing we nevertheless want to show it as warning
+               // so that users are aware that something is not correct
+               for e in errors {
                    if self.is_diagnostic_namespace_variant {
-                       if let Some(item_def_id) = item_def_id.as_local() {
+                       if let Some(trait_def_id) = trait_def_id.as_local() {
                            tcx.emit_node_span_lint(
                                UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES,
-                               tcx.local_def_id_to_hir_id(item_def_id),
+                               tcx.local_def_id_to_hir_id(trait_def_id),
                                self.span,
-                               DisallowedPositionalArgument,
+                               WrappedParserError { description: e.description, label: e.label },
                            );
                        }
                    } else {
@ -887,7 +864,8 @@ impl<'tcx> OnUnimplementedFormatString {
                            tcx.dcx(),
                            self.span,
                            E0231,
-                           "only named generic parameters are allowed"
+                           "{}",
+                           e.description,
                        )
                        .emit();
                        result = Err(reported);
@ -895,29 +873,6 @@ impl<'tcx> OnUnimplementedFormatString {
                    }
                }
            }
-       }
-       }
-       // we cannot return errors from processing the format string as hard error here
-       // as the diagnostic namespace guarantees that malformed input cannot cause an error
-       //
-       // if we encounter any error while processing we nevertheless want to show it as warning
-       // so that users are aware that something is not correct
-       for e in parser.errors {
-           if self.is_diagnostic_namespace_variant {
-               if let Some(item_def_id) = item_def_id.as_local() {
-                   tcx.emit_node_span_lint(
-                       UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES,
-                       tcx.local_def_id_to_hir_id(item_def_id),
-                       self.span,
-                       WrappedParserError { description: e.description, label: e.label },
-                   );
-               }
-           } else {
-               let reported =
-                   struct_span_code_err!(tcx.dcx(), self.span, E0231, "{}", e.description,).emit();
-               result = Err(reported);
-           }
-       }
-
        result
    }
@ -926,85 +881,18 @@ impl<'tcx> OnUnimplementedFormatString {
|
||||||
&self,
|
&self,
|
||||||
tcx: TyCtxt<'tcx>,
|
tcx: TyCtxt<'tcx>,
|
||||||
trait_ref: ty::TraitRef<'tcx>,
|
trait_ref: ty::TraitRef<'tcx>,
|
||||||
options: &FxHashMap<Symbol, String>,
|
args: &FormatArgs<'tcx>,
|
||||||
long_ty_file: &mut Option<PathBuf>,
|
|
||||||
) -> String {
|
) -> String {
|
||||||
let name = tcx.item_name(trait_ref.def_id);
|
let trait_def_id = trait_ref.def_id;
|
||||||
let trait_str = tcx.def_path_str(trait_ref.def_id);
|
let ctx = if self.is_diagnostic_namespace_variant {
|
||||||
let generics = tcx.generics_of(trait_ref.def_id);
|
Ctx::DiagnosticOnUnimplemented { tcx, trait_def_id }
|
||||||
let generic_map = generics
|
|
||||||
.own_params
|
|
||||||
.iter()
|
|
||||||
.filter_map(|param| {
|
|
||||||
let value = match param.kind {
|
|
||||||
GenericParamDefKind::Type { .. } | GenericParamDefKind::Const { .. } => {
|
|
||||||
if let Some(ty) = trait_ref.args[param.index as usize].as_type() {
|
|
||||||
tcx.short_string(ty, long_ty_file)
|
|
||||||
} else {
|
} else {
|
||||||
trait_ref.args[param.index as usize].to_string()
|
Ctx::RustcOnUnimplemented { tcx, trait_def_id }
|
||||||
}
|
|
||||||
}
|
|
||||||
GenericParamDefKind::Lifetime => return None,
|
|
||||||
};
|
};
|
||||||
let name = param.name;
|
|
||||||
Some((name, value))
|
|
||||||
})
|
|
||||||
.collect::<FxHashMap<Symbol, String>>();
|
|
||||||
let empty_string = String::new();
|
|
||||||
|
|
||||||
let s = self.symbol.as_str();
|
if let Ok(s) = FormatString::parse(self.symbol, self.span, &ctx) {
|
||||||
let mut parser = Parser::new(s, None, None, false, ParseMode::Format);
|
s.format(args)
|
||||||
let item_context = (options.get(&sym::ItemContext)).unwrap_or(&empty_string);
|
|
||||||
let constructed_message = (&mut parser)
|
|
||||||
.map(|p| match p {
|
|
||||||
Piece::Lit(s) => s.to_owned(),
|
|
||||||
Piece::NextArgument(a) => match a.position {
|
|
||||||
Position::ArgumentNamed(arg) => {
|
|
||||||
let s = Symbol::intern(arg);
|
|
||||||
match generic_map.get(&s) {
|
|
||||||
Some(val) => val.to_string(),
|
|
||||||
None if self.is_diagnostic_namespace_variant => {
|
|
||||||
format!("{{{arg}}}")
|
|
||||||
}
|
|
||||||
None if s == name => trait_str.clone(),
|
|
||||||
None => {
|
|
||||||
if let Some(val) = options.get(&s) {
|
|
||||||
val.clone()
|
|
||||||
} else if s == sym::from_desugaring {
|
|
||||||
// don't break messages using these two arguments incorrectly
|
|
||||||
String::new()
|
|
||||||
} else if s == sym::ItemContext
|
|
||||||
&& !self.is_diagnostic_namespace_variant
|
|
||||||
{
|
|
||||||
item_context.clone()
|
|
||||||
} else if s == sym::integral {
|
|
||||||
String::from("{integral}")
|
|
||||||
} else if s == sym::integer_ {
|
|
||||||
String::from("{integer}")
|
|
||||||
} else if s == sym::float {
|
|
||||||
String::from("{float}")
|
|
||||||
} else {
|
} else {
|
||||||
bug!(
|
|
||||||
"broken on_unimplemented {:?} for {:?}: \
|
|
||||||
no argument matching {:?}",
|
|
||||||
self.symbol,
|
|
||||||
trait_ref,
|
|
||||||
s
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Position::ArgumentImplicitlyIs(_) if self.is_diagnostic_namespace_variant => {
|
|
||||||
String::from("{}")
|
|
||||||
}
|
|
||||||
Position::ArgumentIs(idx) if self.is_diagnostic_namespace_variant => {
|
|
||||||
format!("{{{idx}}}")
|
|
||||||
}
|
|
||||||
_ => bug!("broken on_unimplemented {:?} - bad format arg", self.symbol),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
// we cannot return errors from processing the format string as hard error here
|
// we cannot return errors from processing the format string as hard error here
|
||||||
// as the diagnostic namespace guarantees that malformed input cannot cause an error
|
// as the diagnostic namespace guarantees that malformed input cannot cause an error
|
||||||
//
|
//
|
||||||
|
@ -1014,10 +902,7 @@ impl<'tcx> OnUnimplementedFormatString {
|
||||||
//
|
//
|
||||||
// The actual parser errors are emitted earlier
|
// The actual parser errors are emitted earlier
|
||||||
// as lint warnings in OnUnimplementedFormatString::verify
|
// as lint warnings in OnUnimplementedFormatString::verify
|
||||||
if self.is_diagnostic_namespace_variant && !parser.errors.is_empty() {
|
self.symbol.as_str().into()
|
||||||
String::from(s)
|
|
||||||
} else {
|
|
||||||
constructed_message
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,120 @@
use rustc_ast::MetaItemInner;
use rustc_attr_parsing as attr;
use rustc_middle::ty::{self, TyCtxt};
use rustc_parse_format::{ParseMode, Parser, Piece, Position};
use rustc_span::{DesugaringKind, Span, Symbol, kw, sym};

/// A predicate in an attribute using on, all, any,
/// similar to a cfg predicate.
#[derive(Debug)]
pub struct Condition {
pub inner: MetaItemInner,
}

impl Condition {
pub fn span(&self) -> Span {
self.inner.span()
}

pub fn matches_predicate<'tcx>(&self, tcx: TyCtxt<'tcx>, options: &ConditionOptions) -> bool {
attr::eval_condition(&self.inner, tcx.sess, Some(tcx.features()), &mut |cfg| {
let value = cfg.value.map(|v| {
// `with_no_visible_paths` is also used when generating the options,
// so we need to match it here.
ty::print::with_no_visible_paths!({
Parser::new(v.as_str(), None, None, false, ParseMode::Format)
.map(|p| match p {
Piece::Lit(s) => s.to_owned(),
Piece::NextArgument(a) => match a.position {
Position::ArgumentNamed(arg) => {
let s = Symbol::intern(arg);
match options.generic_args.iter().find(|(k, _)| *k == s) {
Some((_, val)) => val.to_string(),
None => format!("{{{arg}}}"),
}
}
Position::ArgumentImplicitlyIs(_) => String::from("{}"),
Position::ArgumentIs(idx) => format!("{{{idx}}}"),
},
})
.collect()
})
});

options.contains(cfg.name, &value)
})
}
}

/// Used with `Condition::matches_predicate` to test whether the condition applies
///
/// For example, given a
/// ```rust,ignore (just an example)
/// #[rustc_on_unimplemented(
/// on(all(from_desugaring = "QuestionMark"),
/// message = "the `?` operator can only be used in {ItemContext} \
/// that returns `Result` or `Option` \
/// (or another type that implements `{FromResidual}`)",
/// label = "cannot use the `?` operator in {ItemContext} that returns `{Self}`",
/// parent_label = "this function should return `Result` or `Option` to accept `?`"
/// ),
/// )]
/// pub trait FromResidual<R = <Self as Try>::Residual> {
/// ...
/// }
///
/// async fn an_async_function() -> u32 {
/// let x: Option<u32> = None;
/// x?; //~ ERROR the `?` operator
/// 22
/// }
/// ```
/// it will look like this:
///
/// ```rust,ignore (just an example)
/// ConditionOptions {
/// self_types: ["u32", "{integral}"],
/// from_desugaring: Some("QuestionMark"),
/// cause: None,
/// crate_local: false,
/// direct: true,
/// generic_args: [("Self","u32"),
/// ("R", "core::option::Option<core::convert::Infallible>"),
/// ("R", "core::option::Option<T>" ),
/// ],
/// }
/// ```
#[derive(Debug)]
pub struct ConditionOptions {
/// All the self types that may apply.
/// for example
pub self_types: Vec<String>,
// The kind of compiler desugaring.
pub from_desugaring: Option<DesugaringKind>,
/// Match on a variant of [rustc_infer::traits::ObligationCauseCode]
pub cause: Option<String>,
pub crate_local: bool,
/// Is the obligation "directly" user-specified, rather than derived?
pub direct: bool,
// A list of the generic arguments and their reified types
pub generic_args: Vec<(Symbol, String)>,
}

impl ConditionOptions {
pub fn contains(&self, key: Symbol, value: &Option<String>) -> bool {
match (key, value) {
(sym::_Self | kw::SelfUpper, Some(value)) => self.self_types.contains(&value),
// from_desugaring as a flag
(sym::from_desugaring, None) => self.from_desugaring.is_some(),
// from_desugaring as key == value
(sym::from_desugaring, Some(v)) if let Some(ds) = self.from_desugaring => ds.matches(v),
(sym::cause, Some(value)) => self.cause.as_deref() == Some(value),
(sym::crate_local, None) => self.crate_local,
(sym::direct, None) => self.direct,
(other, Some(value)) => {
self.generic_args.iter().any(|(k, v)| *k == other && v == value)
}
_ => false,
}
}
}
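For reference, a minimal standalone sketch of the key/value matching that `ConditionOptions::contains` performs above, using plain strings and a `HashMap` instead of `Symbol` and the real options struct (names here are illustrative only, not part of the commit):

use std::collections::HashMap;

// Hypothetical, simplified stand-in for the matching above: flags (like
// `crate_local` or `direct`) match when no value is given, while key = "value"
// pairs match against the collected generic arguments.
fn contains(flags: &[&str], generic_args: &HashMap<&str, &str>, key: &str, value: Option<&str>) -> bool {
    match value {
        None => flags.contains(&key),
        Some(v) => generic_args.get(key).is_some_and(|actual| *actual == v),
    }
}

fn main() {
    let flags = ["direct"];
    let mut generic_args = HashMap::new();
    generic_args.insert("Self", "u32");

    assert!(contains(&flags, &generic_args, "direct", None));
    assert!(contains(&flags, &generic_args, "Self", Some("u32")));
    assert!(!contains(&flags, &generic_args, "Self", Some("String")));
    println!("condition matching sketch ok");
}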
@ -0,0 +1,414 @@
use std::fmt;

use errors::*;
use rustc_middle::ty::TyCtxt;
use rustc_middle::ty::print::TraitRefPrintSugared;
use rustc_parse_format::{
Alignment, Argument, Count, FormatSpec, InnerSpan, ParseError, ParseMode, Parser,
Piece as RpfPiece, Position,
};
use rustc_session::lint::builtin::UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES;
use rustc_span::def_id::DefId;
use rustc_span::{BytePos, Pos, Span, Symbol, kw, sym};

/// Like [std::fmt::Arguments] this is a string that has been parsed into "pieces",
/// either as string pieces or dynamic arguments.
#[derive(Debug)]
pub struct FormatString {
#[allow(dead_code, reason = "Debug impl")]
input: Symbol,
span: Span,
pieces: Vec<Piece>,
/// The formatting string was parsed succesfully but with warnings
pub warnings: Vec<FormatWarning>,
}

#[derive(Debug)]
enum Piece {
Lit(String),
Arg(FormatArg),
}

#[derive(Debug)]
enum FormatArg {
// A generic parameter, like `{T}` if we're on the `From<T>` trait.
GenericParam {
generic_param: Symbol,
},
// `{Self}`
SelfUpper,
/// `{This}` or `{TraitName}`
This,
/// The sugared form of the trait
Trait,
/// what we're in, like a function, method, closure etc.
ItemContext,
/// What the user typed, if it doesn't match anything we can use.
AsIs(String),
}

pub enum Ctx<'tcx> {
// `#[rustc_on_unimplemented]`
RustcOnUnimplemented { tcx: TyCtxt<'tcx>, trait_def_id: DefId },
// `#[diagnostic::...]`
DiagnosticOnUnimplemented { tcx: TyCtxt<'tcx>, trait_def_id: DefId },
}

#[derive(Debug)]
pub enum FormatWarning {
UnknownParam { argument_name: Symbol, span: Span },
PositionalArgument { span: Span, help: String },
InvalidSpecifier { name: String, span: Span },
FutureIncompat { span: Span, help: String },
}

impl FormatWarning {
pub fn emit_warning<'tcx>(&self, tcx: TyCtxt<'tcx>, item_def_id: DefId) {
match *self {
FormatWarning::UnknownParam { argument_name, span } => {
let this = tcx.item_ident(item_def_id);
if let Some(item_def_id) = item_def_id.as_local() {
tcx.emit_node_span_lint(
UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES,
tcx.local_def_id_to_hir_id(item_def_id),
span,
UnknownFormatParameterForOnUnimplementedAttr {
argument_name,
trait_name: this,
},
);
}
}
FormatWarning::PositionalArgument { span, .. } => {
if let Some(item_def_id) = item_def_id.as_local() {
tcx.emit_node_span_lint(
UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES,
tcx.local_def_id_to_hir_id(item_def_id),
span,
DisallowedPositionalArgument,
);
}
}
FormatWarning::InvalidSpecifier { span, .. } => {
if let Some(item_def_id) = item_def_id.as_local() {
tcx.emit_node_span_lint(
UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES,
tcx.local_def_id_to_hir_id(item_def_id),
span,
InvalidFormatSpecifier,
);
}
}
FormatWarning::FutureIncompat { .. } => {
// We've never deprecated anything in diagnostic namespace format strings
// but if we do we will emit a warning here

// FIXME(mejrs) in a couple releases, start emitting warnings for
// #[rustc_on_unimplemented] deprecated args
}
}
}
}

/// Arguments to fill a [FormatString] with.
///
/// For example, given a
/// ```rust,ignore (just an example)
///
/// #[rustc_on_unimplemented(
/// on(all(from_desugaring = "QuestionMark"),
/// message = "the `?` operator can only be used in {ItemContext} \
/// that returns `Result` or `Option` \
/// (or another type that implements `{FromResidual}`)",
/// label = "cannot use the `?` operator in {ItemContext} that returns `{Self}`",
/// parent_label = "this function should return `Result` or `Option` to accept `?`"
/// ),
/// )]
/// pub trait FromResidual<R = <Self as Try>::Residual> {
/// ...
/// }
///
/// async fn an_async_function() -> u32 {
/// let x: Option<u32> = None;
/// x?; //~ ERROR the `?` operator
/// 22
/// }
/// ```
/// it will look like this:
///
/// ```rust,ignore (just an example)
/// FormatArgs {
/// this: "FromResidual",
/// trait_sugared: "FromResidual<Option<Infallible>>",
/// item_context: "an async function",
/// generic_args: [("Self", "u32"), ("R", "Option<Infallible>")],
/// }
/// ```
#[derive(Debug)]
pub struct FormatArgs<'tcx> {
pub this: String,
pub trait_sugared: TraitRefPrintSugared<'tcx>,
pub item_context: &'static str,
pub generic_args: Vec<(Symbol, String)>,
}

impl FormatString {
pub fn span(&self) -> Span {
self.span
}

pub fn parse<'tcx>(
input: Symbol,
span: Span,
ctx: &Ctx<'tcx>,
) -> Result<Self, Vec<ParseError>> {
let s = input.as_str();
let mut parser = Parser::new(s, None, None, false, ParseMode::Format);
let mut pieces = Vec::new();
let mut warnings = Vec::new();

for piece in &mut parser {
match piece {
RpfPiece::Lit(lit) => {
pieces.push(Piece::Lit(lit.into()));
}
RpfPiece::NextArgument(arg) => {
warn_on_format_spec(arg.format, &mut warnings, span);
let arg = parse_arg(&arg, ctx, &mut warnings, span);
pieces.push(Piece::Arg(arg));
}
}
}

if parser.errors.is_empty() {
Ok(FormatString { input, pieces, span, warnings })
} else {
Err(parser.errors)
}
}

pub fn format(&self, args: &FormatArgs<'_>) -> String {
let mut ret = String::new();
for piece in &self.pieces {
match piece {
Piece::Lit(s) | Piece::Arg(FormatArg::AsIs(s)) => ret.push_str(&s),

// `A` if we have `trait Trait<A> {}` and `note = "i'm the actual type of {A}"`
Piece::Arg(FormatArg::GenericParam { generic_param }) => {
// Should always be some but we can't raise errors here
let value = match args.generic_args.iter().find(|(p, _)| p == generic_param) {
Some((_, val)) => val.to_string(),
None => generic_param.to_string(),
};
ret.push_str(&value);
}
// `{Self}`
Piece::Arg(FormatArg::SelfUpper) => {
let slf = match args.generic_args.iter().find(|(p, _)| *p == kw::SelfUpper) {
Some((_, val)) => val.to_string(),
None => "Self".to_string(),
};
ret.push_str(&slf);
}

// It's only `rustc_onunimplemented` from here
Piece::Arg(FormatArg::This) => ret.push_str(&args.this),
Piece::Arg(FormatArg::Trait) => {
let _ = fmt::write(&mut ret, format_args!("{}", &args.trait_sugared));
}
Piece::Arg(FormatArg::ItemContext) => ret.push_str(args.item_context),
}
}
ret
}
}

fn parse_arg<'tcx>(
arg: &Argument<'_>,
ctx: &Ctx<'tcx>,
warnings: &mut Vec<FormatWarning>,
input_span: Span,
) -> FormatArg {
let (Ctx::RustcOnUnimplemented { tcx, trait_def_id }
| Ctx::DiagnosticOnUnimplemented { tcx, trait_def_id }) = ctx;
let trait_name = tcx.item_ident(*trait_def_id);
let generics = tcx.generics_of(trait_def_id);
let span = slice_span(input_span, arg.position_span);

match arg.position {
// Something like "hello {name}"
Position::ArgumentNamed(name) => match (ctx, Symbol::intern(name)) {
// accepted, but deprecated
(Ctx::RustcOnUnimplemented { .. }, sym::_Self) => {
warnings
.push(FormatWarning::FutureIncompat { span, help: String::from("use {Self}") });
FormatArg::SelfUpper
}
(
Ctx::RustcOnUnimplemented { .. },
sym::from_desugaring
| sym::crate_local
| sym::direct
| sym::cause
| sym::float
| sym::integer_
| sym::integral,
) => {
warnings.push(FormatWarning::FutureIncompat {
span,
help: String::from("don't use this in a format string"),
});
FormatArg::AsIs(String::new())
}

// Only `#[rustc_on_unimplemented]` can use these
(Ctx::RustcOnUnimplemented { .. }, sym::ItemContext) => FormatArg::ItemContext,
(Ctx::RustcOnUnimplemented { .. }, sym::This) => FormatArg::This,
(Ctx::RustcOnUnimplemented { .. }, sym::Trait) => FormatArg::Trait,
// `{ThisTraitsName}`. Some attrs in std use this, but I'd like to change it to the more general `{This}`
// because that'll be simpler to parse and extend in the future
(Ctx::RustcOnUnimplemented { .. }, name) if name == trait_name.name => {
warnings
.push(FormatWarning::FutureIncompat { span, help: String::from("use {This}") });
FormatArg::This
}

// Any attribute can use these
(
Ctx::RustcOnUnimplemented { .. } | Ctx::DiagnosticOnUnimplemented { .. },
kw::SelfUpper,
) => FormatArg::SelfUpper,
(
Ctx::RustcOnUnimplemented { .. } | Ctx::DiagnosticOnUnimplemented { .. },
generic_param,
) if generics.own_params.iter().any(|param| param.name == generic_param) => {
FormatArg::GenericParam { generic_param }
}

(_, argument_name) => {
warnings.push(FormatWarning::UnknownParam { argument_name, span });
FormatArg::AsIs(format!("{{{}}}", argument_name.as_str()))
}
},

// `{:1}` and `{}` are ignored
Position::ArgumentIs(idx) => {
warnings.push(FormatWarning::PositionalArgument {
span,
help: format!("use `{{{idx}}}` to print a number in braces"),
});
FormatArg::AsIs(format!("{{{idx}}}"))
}
Position::ArgumentImplicitlyIs(_) => {
warnings.push(FormatWarning::PositionalArgument {
span,
help: String::from("use `{{}}` to print empty braces"),
});
FormatArg::AsIs(String::from("{}"))
}
}
}

/// `#[rustc_on_unimplemented]` and `#[diagnostic::...]` don't actually do anything
/// with specifiers, so emit a warning if they are used.
fn warn_on_format_spec(spec: FormatSpec<'_>, warnings: &mut Vec<FormatWarning>, input_span: Span) {
if !matches!(
spec,
FormatSpec {
fill: None,
fill_span: None,
align: Alignment::AlignUnknown,
sign: None,
alternate: false,
zero_pad: false,
debug_hex: None,
precision: Count::CountImplied,
precision_span: None,
width: Count::CountImplied,
width_span: None,
ty: _,
ty_span: _,
},
) {
let span = spec.ty_span.map(|inner| slice_span(input_span, inner)).unwrap_or(input_span);
warnings.push(FormatWarning::InvalidSpecifier { span, name: spec.ty.into() })
}
}

/// Helper function because `Span` and `rustc_parse_format::InnerSpan` don't know about each other
fn slice_span(input: Span, inner: InnerSpan) -> Span {
let InnerSpan { start, end } = inner;
let span = input.data();

Span::new(
span.lo + BytePos::from_usize(start),
span.lo + BytePos::from_usize(end),
span.ctxt,
span.parent,
)
}

pub mod errors {
use rustc_macros::LintDiagnostic;
use rustc_span::Ident;

use super::*;

#[derive(LintDiagnostic)]
#[diag(trait_selection_unknown_format_parameter_for_on_unimplemented_attr)]
#[help]
pub struct UnknownFormatParameterForOnUnimplementedAttr {
pub argument_name: Symbol,
pub trait_name: Ident,
}

#[derive(LintDiagnostic)]
#[diag(trait_selection_disallowed_positional_argument)]
#[help]
pub struct DisallowedPositionalArgument;

#[derive(LintDiagnostic)]
#[diag(trait_selection_invalid_format_specifier)]
#[help]
pub struct InvalidFormatSpecifier;

#[derive(LintDiagnostic)]
#[diag(trait_selection_missing_options_for_on_unimplemented_attr)]
#[help]
pub struct MissingOptionsForOnUnimplementedAttr;

#[derive(LintDiagnostic)]
#[diag(trait_selection_ignored_diagnostic_option)]
pub struct IgnoredDiagnosticOption {
pub option_name: &'static str,
#[label]
pub span: Span,
#[label(trait_selection_other_label)]
pub prev_span: Span,
}

impl IgnoredDiagnosticOption {
pub fn maybe_emit_warning<'tcx>(
tcx: TyCtxt<'tcx>,
item_def_id: DefId,
new: Option<Span>,
old: Option<Span>,
option_name: &'static str,
) {
if let (Some(new_item), Some(old_item)) = (new, old) {
if let Some(item_def_id) = item_def_id.as_local() {
tcx.emit_node_span_lint(
UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES,
tcx.local_def_id_to_hir_id(item_def_id),
new_item,
IgnoredDiagnosticOption {
span: new_item,
prev_span: old_item,
option_name,
},
);
}
}
}
}
}
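For reference, a rough standalone illustration of the substitution behaviour the new `FormatString::format` implements: unknown `{name}` parameters are kept verbatim instead of raising an error. The helper below is hypothetical, does not use `rustc_parse_format`, and ignores `{{`/`}}` escapes; it is a sketch only.

use std::collections::HashMap;

// Substitute `{name}` placeholders from a map, leaving unknown names as
// literal `{name}` text, like the `AsIs` arm above.
fn format_on_unimplemented(template: &str, args: &HashMap<&str, &str>) -> String {
    let mut out = String::new();
    let mut rest = template;
    while let Some(open) = rest.find('{') {
        out.push_str(&rest[..open]);
        match rest[open + 1..].find('}') {
            Some(close) => {
                let name = &rest[open + 1..open + 1 + close];
                match args.get(name) {
                    Some(value) => out.push_str(value),
                    None => {
                        // Unknown parameter: keep the placeholder verbatim.
                        out.push('{');
                        out.push_str(name);
                        out.push('}');
                    }
                }
                rest = &rest[open + close + 2..];
            }
            None => {
                // No closing brace: keep the remainder as-is.
                out.push_str(&rest[open..]);
                rest = "";
            }
        }
    }
    out.push_str(rest);
    out
}

fn main() {
    let mut args = HashMap::new();
    args.insert("Self", "u32");
    let msg = format_on_unimplemented("the trait is not implemented for `{Self}` ({Unknown})", &args);
    assert_eq!(msg, "the trait is not implemented for `u32` ({Unknown})");
}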
@ -1,19 +1,18 @@
use std::fmt;
use std::sync::atomic::{AtomicU32, Ordering};

-use tracing::instrument;
+use super::{Byte, Ref, Tree, Uninhabited};
-use super::{Byte, Nfa, Ref, nfa};
use crate::Map;

-#[derive(PartialEq, Clone, Debug)]
+#[derive(PartialEq)]
+#[cfg_attr(test, derive(Clone))]
pub(crate) struct Dfa<R>
where
R: Ref,
{
pub(crate) transitions: Map<State, Transitions<R>>,
pub(crate) start: State,
-pub(crate) accepting: State,
+pub(crate) accept: State,
}

#[derive(PartialEq, Clone, Debug)]
@ -34,35 +33,15 @@ where
}
}

-impl<R> Transitions<R>
+/// The states in a [`Dfa`] represent byte offsets.
-where
-R: Ref,
-{
-#[cfg(test)]
-fn insert(&mut self, transition: Transition<R>, state: State) {
-match transition {
-Transition::Byte(b) => {
-self.byte_transitions.insert(b, state);
-}
-Transition::Ref(r) => {
-self.ref_transitions.insert(r, state);
-}
-}
-}
-}

-/// The states in a `Nfa` represent byte offsets.
#[derive(Hash, Eq, PartialEq, PartialOrd, Ord, Copy, Clone)]
-pub(crate) struct State(u32);
+pub(crate) struct State(pub(crate) u32);

-#[cfg(test)]
+impl State {
-#[derive(Hash, Eq, PartialEq, Clone, Copy)]
+pub(crate) fn new() -> Self {
-pub(crate) enum Transition<R>
+static COUNTER: AtomicU32 = AtomicU32::new(0);
-where
+Self(COUNTER.fetch_add(1, Ordering::SeqCst))
-R: Ref,
+}
-{
-Byte(Byte),
-Ref(R),
}

impl fmt::Debug for State {
@ -71,19 +50,6 @@ impl fmt::Debug for State {
}
}

-#[cfg(test)]
-impl<R> fmt::Debug for Transition<R>
-where
-R: Ref,
-{
-fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-match &self {
-Self::Byte(b) => b.fmt(f),
-Self::Ref(r) => r.fmt(f),
-}
-}
-}

impl<R> Dfa<R>
where
R: Ref,
@ -92,60 +58,167 @@ where
pub(crate) fn bool() -> Self {
let mut transitions: Map<State, Transitions<R>> = Map::default();
let start = State::new();
-let accepting = State::new();
+let accept = State::new();

-transitions.entry(start).or_default().insert(Transition::Byte(Byte::Init(0x00)), accepting);
+transitions.entry(start).or_default().byte_transitions.insert(Byte::Init(0x00), accept);

-transitions.entry(start).or_default().insert(Transition::Byte(Byte::Init(0x01)), accepting);
+transitions.entry(start).or_default().byte_transitions.insert(Byte::Init(0x01), accept);

-Self { transitions, start, accepting }
+Self { transitions, start, accept }
}

-#[instrument(level = "debug")]
+pub(crate) fn unit() -> Self {
-pub(crate) fn from_nfa(nfa: Nfa<R>) -> Self {
+let transitions: Map<State, Transitions<R>> = Map::default();
-let Nfa { transitions: nfa_transitions, start: nfa_start, accepting: nfa_accepting } = nfa;
+let start = State::new();
+let accept = start;

-let mut dfa_transitions: Map<State, Transitions<R>> = Map::default();
+Self { transitions, start, accept }
-let mut nfa_to_dfa: Map<nfa::State, State> = Map::default();
-let dfa_start = State::new();
-nfa_to_dfa.insert(nfa_start, dfa_start);

-let mut queue = vec![(nfa_start, dfa_start)];

-while let Some((nfa_state, dfa_state)) = queue.pop() {
-if nfa_state == nfa_accepting {
-continue;
}

-for (nfa_transition, next_nfa_states) in nfa_transitions[&nfa_state].iter() {
+pub(crate) fn from_byte(byte: Byte) -> Self {
-let dfa_transitions =
+let mut transitions: Map<State, Transitions<R>> = Map::default();
-dfa_transitions.entry(dfa_state).or_insert_with(Default::default);
+let start = State::new();
+let accept = State::new();

-let mapped_state = next_nfa_states.iter().find_map(|x| nfa_to_dfa.get(x).copied());
+transitions.entry(start).or_default().byte_transitions.insert(byte, accept);

-let next_dfa_state = match nfa_transition {
+Self { transitions, start, accept }
-&nfa::Transition::Byte(b) => *dfa_transitions
+}
-.byte_transitions
-.entry(b)
+pub(crate) fn from_ref(r: R) -> Self {
-.or_insert_with(|| mapped_state.unwrap_or_else(State::new)),
+let mut transitions: Map<State, Transitions<R>> = Map::default();
-&nfa::Transition::Ref(r) => *dfa_transitions
+let start = State::new();
-.ref_transitions
+let accept = State::new();
-.entry(r)
-.or_insert_with(|| mapped_state.unwrap_or_else(State::new)),
+transitions.entry(start).or_default().ref_transitions.insert(r, accept);

+Self { transitions, start, accept }
+}

+pub(crate) fn from_tree(tree: Tree<!, R>) -> Result<Self, Uninhabited> {
+Ok(match tree {
+Tree::Byte(b) => Self::from_byte(b),
+Tree::Ref(r) => Self::from_ref(r),
+Tree::Alt(alts) => {
+// Convert and filter the inhabited alternatives.
+let mut alts = alts.into_iter().map(Self::from_tree).filter_map(Result::ok);
+// If there are no alternatives, return `Uninhabited`.
+let dfa = alts.next().ok_or(Uninhabited)?;
+// Combine the remaining alternatives with `dfa`.
+alts.fold(dfa, |dfa, alt| dfa.union(alt, State::new))
+}
+Tree::Seq(elts) => {
+let mut dfa = Self::unit();
+for elt in elts.into_iter().map(Self::from_tree) {
+dfa = dfa.concat(elt?);
+}
+dfa
+}
+})
+}

+/// Concatenate two `Dfa`s.
+pub(crate) fn concat(self, other: Self) -> Self {
+if self.start == self.accept {
+return other;
+} else if other.start == other.accept {
+return self;
+}

+let start = self.start;
+let accept = other.accept;

+let mut transitions: Map<State, Transitions<R>> = self.transitions;

+for (source, transition) in other.transitions {
+let fix_state = |state| if state == other.start { self.accept } else { state };
+let entry = transitions.entry(fix_state(source)).or_default();
+for (edge, destination) in transition.byte_transitions {
+entry.byte_transitions.insert(edge, fix_state(destination));
+}
+for (edge, destination) in transition.ref_transitions {
+entry.ref_transitions.insert(edge, fix_state(destination));
+}
+}

+Self { transitions, start, accept }
+}

+/// Compute the union of two `Dfa`s.
+pub(crate) fn union(self, other: Self, mut new_state: impl FnMut() -> State) -> Self {
+// We implement `union` by lazily initializing a set of states
+// corresponding to the product of states in `self` and `other`, and
+// then add transitions between these states that correspond to where
+// they exist between `self` and `other`.

+let a = self;
+let b = other;

+let accept = new_state();

+let mut mapping: Map<(Option<State>, Option<State>), State> = Map::default();

+let mut mapped = |(a_state, b_state)| {
+if Some(a.accept) == a_state || Some(b.accept) == b_state {
+// If either `a_state` or `b_state` are accepting, map to a
+// common `accept` state.
+accept
+} else {
+*mapping.entry((a_state, b_state)).or_insert_with(&mut new_state)
+}
};

-for &next_nfa_state in next_nfa_states {
+let start = mapped((Some(a.start), Some(b.start)));
-nfa_to_dfa.entry(next_nfa_state).or_insert_with(|| {
+let mut transitions: Map<State, Transitions<R>> = Map::default();
-queue.push((next_nfa_state, next_dfa_state));
+let mut queue = vec![(Some(a.start), Some(b.start))];
-next_dfa_state
+let empty_transitions = Transitions::default();
-});

+while let Some((a_src, b_src)) = queue.pop() {
+let a_transitions =
+a_src.and_then(|a_src| a.transitions.get(&a_src)).unwrap_or(&empty_transitions);
+let b_transitions =
+b_src.and_then(|b_src| b.transitions.get(&b_src)).unwrap_or(&empty_transitions);

+let byte_transitions =
+a_transitions.byte_transitions.keys().chain(b_transitions.byte_transitions.keys());

+for byte_transition in byte_transitions {
+let a_dst = a_transitions.byte_transitions.get(byte_transition).copied();
+let b_dst = b_transitions.byte_transitions.get(byte_transition).copied();

+assert!(a_dst.is_some() || b_dst.is_some());

+let src = mapped((a_src, b_src));
+let dst = mapped((a_dst, b_dst));

+transitions.entry(src).or_default().byte_transitions.insert(*byte_transition, dst);

+if !transitions.contains_key(&dst) {
+queue.push((a_dst, b_dst))
+}
+}

+let ref_transitions =
+a_transitions.ref_transitions.keys().chain(b_transitions.ref_transitions.keys());

+for ref_transition in ref_transitions {
+let a_dst = a_transitions.ref_transitions.get(ref_transition).copied();
+let b_dst = b_transitions.ref_transitions.get(ref_transition).copied();

+assert!(a_dst.is_some() || b_dst.is_some());

+let src = mapped((a_src, b_src));
+let dst = mapped((a_dst, b_dst));

+transitions.entry(src).or_default().ref_transitions.insert(*ref_transition, dst);

+if !transitions.contains_key(&dst) {
+queue.push((a_dst, b_dst))
}
}
}

-let dfa_accepting = nfa_to_dfa[&nfa_accepting];
+Self { transitions, start, accept }

-Self { transitions: dfa_transitions, start: dfa_start, accepting: dfa_accepting }
}

pub(crate) fn bytes_from(&self, start: State) -> Option<&Map<Byte, State>> {
@ -159,24 +232,48 @@ where
pub(crate) fn refs_from(&self, start: State) -> Option<&Map<R, State>> {
Some(&self.transitions.get(&start)?.ref_transitions)
}
-}

-impl State {
+#[cfg(test)]
-pub(crate) fn new() -> Self {
+pub(crate) fn from_edges<B: Copy + Into<Byte>>(
-static COUNTER: AtomicU32 = AtomicU32::new(0);
+start: u32,
-Self(COUNTER.fetch_add(1, Ordering::SeqCst))
+accept: u32,
+edges: &[(u32, B, u32)],
+) -> Self {
+let start = State(start);
+let accept = State(accept);
+let mut transitions: Map<State, Transitions<R>> = Map::default();

+for &(src, edge, dst) in edges {
+let src = State(src);
+let dst = State(dst);
+let old = transitions.entry(src).or_default().byte_transitions.insert(edge.into(), dst);
+assert!(old.is_none());
+}

+Self { start, accept, transitions }
}
}

-#[cfg(test)]
+/// Serialize the DFA using the Graphviz DOT format.
-impl<R> From<nfa::Transition<R>> for Transition<R>
+impl<R> fmt::Debug for Dfa<R>
where
R: Ref,
{
-fn from(nfa_transition: nfa::Transition<R>) -> Self {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-match nfa_transition {
+writeln!(f, "digraph {{")?;
-nfa::Transition::Byte(byte) => Transition::Byte(byte),
+writeln!(f, " {:?} [shape = doublecircle]", self.start)?;
-nfa::Transition::Ref(r) => Transition::Ref(r),
+writeln!(f, " {:?} [shape = doublecircle]", self.accept)?;

+for (src, transitions) in self.transitions.iter() {
+for (t, dst) in transitions.byte_transitions.iter() {
+writeln!(f, " {src:?} -> {dst:?} [label=\"{t:?}\"]")?;
}

+for (t, dst) in transitions.ref_transitions.iter() {
+writeln!(f, " {src:?} -> {dst:?} [label=\"{t:?}\"]")?;
+}
+}

+writeln!(f, "}}")
}
}
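For reference, a rough standalone sketch of the lazy product construction that `Dfa::union` above performs, over a toy two-symbol alphabet and with `HashMap` in place of the crate's `Map`; the types and names here are illustrative only, not part of the commit.

use std::collections::{HashMap, HashSet, VecDeque};

type State = u32;

// Toy byte-labelled DFA: a start state, a single accept state, and edges.
#[derive(Debug)]
struct Dfa {
    start: State,
    accept: State,
    edges: HashMap<(State, u8), State>,
}

// Map a pair of (optional) source states to a state of the product DFA,
// collapsing any pair that touches either accept state into `accept`.
fn mapped(
    pair: (Option<State>, Option<State>),
    a_accept: State,
    b_accept: State,
    accept: State,
    mapping: &mut HashMap<(Option<State>, Option<State>), State>,
    next: &mut State,
) -> State {
    if pair.0 == Some(a_accept) || pair.1 == Some(b_accept) {
        return accept;
    }
    *mapping.entry(pair).or_insert_with(|| {
        let s = *next;
        *next += 1;
        s
    })
}

// Explore only the state pairs reachable from the two start states.
fn union(a: &Dfa, b: &Dfa) -> Dfa {
    let mut next: State = 0;
    let accept = { let s = next; next += 1; s };
    let mut mapping = HashMap::new();
    let mut edges = HashMap::new();

    let start_pair = (Some(a.start), Some(b.start));
    let start = mapped(start_pair, a.accept, b.accept, accept, &mut mapping, &mut next);
    let mut queue = VecDeque::from([start_pair]);
    let mut seen = HashSet::from([start_pair]);

    while let Some((sa, sb)) = queue.pop_front() {
        // The toy alphabet is just {0, 1}.
        for byte in 0u8..=1 {
            let da = sa.and_then(|s| a.edges.get(&(s, byte)).copied());
            let db = sb.and_then(|s| b.edges.get(&(s, byte)).copied());
            if da.is_none() && db.is_none() {
                continue;
            }
            let src = mapped((sa, sb), a.accept, b.accept, accept, &mut mapping, &mut next);
            let dst = mapped((da, db), a.accept, b.accept, accept, &mut mapping, &mut next);
            edges.insert((src, byte), dst);
            if seen.insert((da, db)) {
                queue.push_back((da, db));
            }
        }
    }

    Dfa { start, accept, edges }
}

fn main() {
    // `a` accepts the byte string [0, 0]; `b` accepts [1, 0].
    let a = Dfa { start: 0, accept: 2, edges: HashMap::from([((0, 0u8), 1), ((1, 0u8), 2)]) };
    let b = Dfa { start: 0, accept: 2, edges: HashMap::from([((0, 1u8), 1), ((1, 0u8), 2)]) };
    let u = union(&a, &b);
    println!("union has {} edges, start {}, accept {}", u.edges.len(), u.start, u.accept);
}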
@ -4,9 +4,6 @@ use std::hash::Hash;
pub(crate) mod tree;
pub(crate) use tree::Tree;

-pub(crate) mod nfa;
-pub(crate) use nfa::Nfa;

pub(crate) mod dfa;
pub(crate) use dfa::Dfa;

@ -29,6 +26,13 @@ impl fmt::Debug for Byte {
}
}

+#[cfg(test)]
+impl From<u8> for Byte {
+fn from(src: u8) -> Self {
+Self::Init(src)
+}
+}

pub(crate) trait Def: Debug + Hash + Eq + PartialEq + Copy + Clone {
fn has_safety_invariants(&self) -> bool;
}
@ -1,169 +0,0 @@
use std::fmt;
use std::sync::atomic::{AtomicU32, Ordering};

use super::{Byte, Ref, Tree, Uninhabited};
use crate::{Map, Set};

/// A non-deterministic finite automaton (NFA) that represents the layout of a type.
/// The transmutability of two given types is computed by comparing their `Nfa`s.
#[derive(PartialEq, Debug)]
pub(crate) struct Nfa<R>
where
R: Ref,
{
pub(crate) transitions: Map<State, Map<Transition<R>, Set<State>>>,
pub(crate) start: State,
pub(crate) accepting: State,
}

/// The states in a `Nfa` represent byte offsets.
#[derive(Hash, Eq, PartialEq, PartialOrd, Ord, Copy, Clone)]
pub(crate) struct State(u32);

/// The transitions between states in a `Nfa` reflect bit validity.
#[derive(Hash, Eq, PartialEq, Clone, Copy)]
pub(crate) enum Transition<R>
where
R: Ref,
{
Byte(Byte),
Ref(R),
}

impl fmt::Debug for State {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "S_{}", self.0)
}
}

impl<R> fmt::Debug for Transition<R>
where
R: Ref,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match &self {
Self::Byte(b) => b.fmt(f),
Self::Ref(r) => r.fmt(f),
}
}
}

impl<R> Nfa<R>
where
R: Ref,
{
pub(crate) fn unit() -> Self {
let transitions: Map<State, Map<Transition<R>, Set<State>>> = Map::default();
let start = State::new();
let accepting = start;

Nfa { transitions, start, accepting }
}

pub(crate) fn from_byte(byte: Byte) -> Self {
let mut transitions: Map<State, Map<Transition<R>, Set<State>>> = Map::default();
let start = State::new();
let accepting = State::new();

let source = transitions.entry(start).or_default();
let edge = source.entry(Transition::Byte(byte)).or_default();
edge.insert(accepting);

Nfa { transitions, start, accepting }
}

pub(crate) fn from_ref(r: R) -> Self {
let mut transitions: Map<State, Map<Transition<R>, Set<State>>> = Map::default();
let start = State::new();
let accepting = State::new();

let source = transitions.entry(start).or_default();
let edge = source.entry(Transition::Ref(r)).or_default();
edge.insert(accepting);

Nfa { transitions, start, accepting }
}

pub(crate) fn from_tree(tree: Tree<!, R>) -> Result<Self, Uninhabited> {
Ok(match tree {
Tree::Byte(b) => Self::from_byte(b),
Tree::Ref(r) => Self::from_ref(r),
Tree::Alt(alts) => {
let mut alts = alts.into_iter().map(Self::from_tree);
let mut nfa = alts.next().ok_or(Uninhabited)??;
for alt in alts {
nfa = nfa.union(alt?);
}
nfa
}
Tree::Seq(elts) => {
let mut nfa = Self::unit();
for elt in elts.into_iter().map(Self::from_tree) {
nfa = nfa.concat(elt?);
}
nfa
}
})
}

/// Concatenate two `Nfa`s.
pub(crate) fn concat(self, other: Self) -> Self {
if self.start == self.accepting {
return other;
} else if other.start == other.accepting {
return self;
}

let start = self.start;
let accepting = other.accepting;

let mut transitions: Map<State, Map<Transition<R>, Set<State>>> = self.transitions;

for (source, transition) in other.transitions {
let fix_state = |state| if state == other.start { self.accepting } else { state };
let entry = transitions.entry(fix_state(source)).or_default();
for (edge, destinations) in transition {
let entry = entry.entry(edge).or_default();
for destination in destinations {
entry.insert(fix_state(destination));
}
}
}

Self { transitions, start, accepting }
}

/// Compute the union of two `Nfa`s.
pub(crate) fn union(self, other: Self) -> Self {
let start = self.start;
let accepting = self.accepting;

let mut transitions: Map<State, Map<Transition<R>, Set<State>>> = self.transitions.clone();

for (&(mut source), transition) in other.transitions.iter() {
// if source is starting state of `other`, replace with starting state of `self`
if source == other.start {
source = self.start;
}
let entry = transitions.entry(source).or_default();
for (edge, destinations) in transition {
let entry = entry.entry(*edge).or_default();
for &(mut destination) in destinations {
// if dest is accepting state of `other`, replace with accepting state of `self`
if destination == other.accepting {
destination = self.accepting;
}
entry.insert(destination);
}
}
}
Self { transitions, start, accepting }
}
}

impl State {
pub(crate) fn new() -> Self {
static COUNTER: AtomicU32 = AtomicU32::new(0);
Self(COUNTER.fetch_add(1, Ordering::SeqCst))
}
}
@ -2,7 +2,7 @@
#![feature(never_type)]
// tidy-alphabetical-end

-pub(crate) use rustc_data_structures::fx::{FxIndexMap as Map, FxIndexSet as Set};
+pub(crate) use rustc_data_structures::fx::FxIndexMap as Map;

pub mod layout;
mod maybe_transmutable;
@ -4,7 +4,7 @@ pub(crate) mod query_context;
#[cfg(test)]
mod tests;

-use crate::layout::{self, Byte, Def, Dfa, Nfa, Ref, Tree, Uninhabited, dfa};
+use crate::layout::{self, Byte, Def, Dfa, Ref, Tree, Uninhabited, dfa};
use crate::maybe_transmutable::query_context::QueryContext;
use crate::{Answer, Condition, Map, Reason};

@ -73,7 +73,7 @@ where
/// Answers whether a `Tree` is transmutable into another `Tree`.
///
/// This method begins by de-def'ing `src` and `dst`, and prunes private paths from `dst`,
-/// then converts `src` and `dst` to `Nfa`s, and computes an answer using those NFAs.
+/// then converts `src` and `dst` to `Dfa`s, and computes an answer using those DFAs.
#[inline(always)]
#[instrument(level = "debug", skip(self), fields(src = ?self.src, dst = ?self.dst))]
pub(crate) fn answer(self) -> Answer<<C as QueryContext>::Ref> {
@ -105,22 +105,22 @@ where

trace!(?dst, "pruned dst");

-// Convert `src` from a tree-based representation to an NFA-based
+// Convert `src` from a tree-based representation to an DFA-based
// representation. If the conversion fails because `src` is uninhabited,
// conclude that the transmutation is acceptable, because instances of
// the `src` type do not exist.
-let src = match Nfa::from_tree(src) {
+let src = match Dfa::from_tree(src) {
Ok(src) => src,
Err(Uninhabited) => return Answer::Yes,
};

-// Convert `dst` from a tree-based representation to an NFA-based
+// Convert `dst` from a tree-based representation to an DFA-based
// representation. If the conversion fails because `src` is uninhabited,
// conclude that the transmutation is unacceptable. Valid instances of
// the `dst` type do not exist, either because it's genuinely
// uninhabited, or because there are no branches of the tree that are
// free of safety invariants.
-let dst = match Nfa::from_tree(dst) {
+let dst = match Dfa::from_tree(dst) {
Ok(dst) => dst,
Err(Uninhabited) => return Answer::No(Reason::DstMayHaveSafetyInvariants),
};
@ -129,23 +129,6 @@ where
}
}

-impl<C> MaybeTransmutableQuery<Nfa<<C as QueryContext>::Ref>, C>
-where
-C: QueryContext,
-{
-/// Answers whether a `Nfa` is transmutable into another `Nfa`.
-///
-/// This method converts `src` and `dst` to DFAs, then computes an answer using those DFAs.
-#[inline(always)]
-#[instrument(level = "debug", skip(self), fields(src = ?self.src, dst = ?self.dst))]
-pub(crate) fn answer(self) -> Answer<<C as QueryContext>::Ref> {
-let Self { src, dst, assume, context } = self;
-let src = Dfa::from_nfa(src);
-let dst = Dfa::from_nfa(dst);
-MaybeTransmutableQuery { src, dst, assume, context }.answer()
-}
-}

impl<C> MaybeTransmutableQuery<Dfa<<C as QueryContext>::Ref>, C>
where
C: QueryContext,
@ -173,7 +156,7 @@ where
src_transitions_len = self.src.transitions.len(),
dst_transitions_len = self.dst.transitions.len()
);
-let answer = if dst_state == self.dst.accepting {
+let answer = if dst_state == self.dst.accept {
// truncation: `size_of(Src) >= size_of(Dst)`
//
// Why is truncation OK to do? Because even though the Src is bigger, all we care about
@ -190,7 +173,7 @@ where
// that none of the actually-used data can introduce an invalid state for Dst's type, we
// are able to safely transmute, even with truncation.
Answer::Yes
-} else if src_state == self.src.accepting {
+} else if src_state == self.src.accept {
// extension: `size_of(Src) >= size_of(Dst)`
if let Some(dst_state_prime) = self.dst.byte_from(dst_state, Byte::Uninit) {
self.answer_memo(cache, src_state, dst_state_prime)
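For reference, a tiny hedged illustration of the "uninhabited source" rule mentioned in the comments above: if no value of the source type can exist, the transmutation question is vacuous, which is why the analysis answers yes. The names below are illustrative only.

// An uninhabited type, playing the role of an uninhabited `src`.
enum Never {}

// The body can never run, because no `Never` value can be constructed;
// the empty match has type `!` and coerces to `u8`.
fn transmute_never(src: Never) -> u8 {
    match src {}
}

fn main() {
    // There is nothing to call `transmute_never` with; it exists only to type-check.
    println!("uninhabited-source example compiles");
}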
@ -126,7 +126,7 @@ mod bool {

let into_set = |alts: Vec<_>| {
#[cfg(feature = "rustc")]
-let mut set = crate::Set::default();
+let mut set = rustc_data_structures::fx::FxIndexSet::default();
#[cfg(not(feature = "rustc"))]
let mut set = std::collections::HashSet::new();
set.extend(alts);
@ -174,3 +174,32 @@ mod bool {
}
}
}

+mod union {
+use super::*;

+#[test]
+fn union() {
+let [a, b, c, d] = [0, 1, 2, 3];
+let s = Dfa::from_edges(a, d, &[(a, 0, b), (b, 0, d), (a, 1, c), (c, 1, d)]);

+let t = Dfa::from_edges(a, c, &[(a, 1, b), (b, 0, c)]);

+let mut ctr = 0;
+let new_state = || {
+let state = crate::layout::dfa::State(ctr);
+ctr += 1;
+state
+};

+let u = s.clone().union(t.clone(), new_state);

+let expected_u =
+Dfa::from_edges(b, a, &[(b, 0, c), (b, 1, d), (d, 1, a), (d, 0, a), (c, 0, a)]);

+assert_eq!(u, expected_u);

+assert_eq!(is_transmutable(&s, &u, Assume::default()), Answer::Yes);
+assert_eq!(is_transmutable(&t, &u, Assume::default()), Answer::Yes);
+}
+}
@ -1116,7 +1116,7 @@ impl CStr {
/// with the corresponding <code>&[str]</code> slice. Otherwise, it will
/// replace any invalid UTF-8 sequences with
/// [`U+FFFD REPLACEMENT CHARACTER`][U+FFFD] and return a
-/// <code>[Cow]::[Owned]\(&[str])</code> with the result.
+/// <code>[Cow]::[Owned]\([String])</code> with the result.
///
/// [str]: prim@str "str"
/// [Borrowed]: Cow::Borrowed
@@ -304,7 +304,7 @@ pub unsafe fn simd_shuffle<T, U, V>(x: T, y: T, idx: U) -> V;
 ///
 /// `U` must be a vector of pointers to the element type of `T`, with the same length as `T`.
 ///
-/// `V` must be a vector of signed integers with the same length as `T` (but any element size).
+/// `V` must be a vector of integers with the same length as `T` (but any element size).
 ///
 /// For each pointer in `ptr`, if the corresponding value in `mask` is `!0`, read the pointer.
 /// Otherwise if the corresponding value in `mask` is `0`, return the corresponding value from
@@ -325,7 +325,7 @@ pub unsafe fn simd_gather<T, U, V>(val: T, ptr: U, mask: V) -> T;
 ///
 /// `U` must be a vector of pointers to the element type of `T`, with the same length as `T`.
 ///
-/// `V` must be a vector of signed integers with the same length as `T` (but any element size).
+/// `V` must be a vector of integers with the same length as `T` (but any element size).
 ///
 /// For each pointer in `ptr`, if the corresponding value in `mask` is `!0`, write the
 /// corresponding value in `val` to the pointer.
@@ -349,7 +349,7 @@ pub unsafe fn simd_scatter<T, U, V>(val: T, ptr: U, mask: V);
 ///
 /// `U` must be a pointer to the element type of `T`
 ///
-/// `V` must be a vector of signed integers with the same length as `T` (but any element size).
+/// `V` must be a vector of integers with the same length as `T` (but any element size).
 ///
 /// For each element, if the corresponding value in `mask` is `!0`, read the corresponding
 /// pointer offset from `ptr`.
@@ -372,7 +372,7 @@ pub unsafe fn simd_masked_load<V, U, T>(mask: V, ptr: U, val: T) -> T;
 ///
 /// `U` must be a pointer to the element type of `T`
 ///
-/// `V` must be a vector of signed integers with the same length as `T` (but any element size).
+/// `V` must be a vector of integers with the same length as `T` (but any element size).
 ///
 /// For each element, if the corresponding value in `mask` is `!0`, write the corresponding
 /// value in `val` to the pointer offset from `ptr`.
@@ -556,7 +556,7 @@ pub unsafe fn simd_bitmask<T, U>(x: T) -> U;
 ///
 /// `T` must be a vector.
 ///
-/// `M` must be a signed integer vector with the same length as `T` (but any element size).
+/// `M` must be an integer vector with the same length as `T` (but any element size).
 ///
 /// For each element, if the corresponding value in `mask` is `!0`, select the element from
 /// `if_true`. If the corresponding value in `mask` is `0`, select the element from
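Aside (editorial): the mask convention these docs describe (a lane value of `!0` selects or enables the lane, `0` leaves it alone) can be emulated with plain scalar code. The sketch below illustrates that convention only, not the intrinsics themselves; `select4` and `masked_load4` are made-up helper names and the 4-lane `i32` layout is an assumption for the example.

```rust
// Scalar emulation of the documented mask semantics.
// `simd_select`-style behaviour: `!0` picks from `if_true`, `0` picks from `if_false`.
fn select4(mask: [i32; 4], if_true: [i32; 4], if_false: [i32; 4]) -> [i32; 4] {
    std::array::from_fn(|i| if mask[i] == !0 { if_true[i] } else { if_false[i] })
}

// `simd_masked_load`-style behaviour: `!0` lanes are read from memory,
// the remaining lanes keep the fallback value in `val`.
fn masked_load4(mask: [i32; 4], src: &[i32; 4], val: [i32; 4]) -> [i32; 4] {
    std::array::from_fn(|i| if mask[i] == !0 { src[i] } else { val[i] })
}

fn main() {
    let m = [!0, 0, !0, 0];
    assert_eq!(select4(m, [1, 2, 3, 4], [9, 9, 9, 9]), [1, 9, 3, 9]);
    assert_eq!(masked_load4(m, &[10, 20, 30, 40], [0; 4]), [10, 0, 30, 0]);
    println!("mask semantics check passed");
}
```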
@@ -23,6 +23,39 @@ impl<I> Enumerate<I> {
     pub(in crate::iter) fn new(iter: I) -> Enumerate<I> {
         Enumerate { iter, count: 0 }
     }
 
+    /// Retrieve the current position of the iterator.
+    ///
+    /// If the iterator has not advanced, the position returned will be 0.
+    ///
+    /// The position may also exceed the bounds of the iterator to allow for calculating
+    /// the displacement of the iterator from following calls to [`Iterator::next`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(next_index)]
+    ///
+    /// let arr = ['a', 'b'];
+    ///
+    /// let mut iter = arr.iter().enumerate();
+    ///
+    /// assert_eq!(iter.next_index(), 0);
+    /// assert_eq!(iter.next(), Some((0, &'a')));
+    ///
+    /// assert_eq!(iter.next_index(), 1);
+    /// assert_eq!(iter.next_index(), 1);
+    /// assert_eq!(iter.next(), Some((1, &'b')));
+    ///
+    /// assert_eq!(iter.next_index(), 2);
+    /// assert_eq!(iter.next(), None);
+    /// assert_eq!(iter.next_index(), 2);
+    /// ```
+    #[inline]
+    #[unstable(feature = "next_index", issue = "130711")]
+    pub fn next_index(&self) -> usize {
+        self.count
+    }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
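Aside (editorial): a small usage sketch of the unstable `next_index` method added above. It assumes a nightly toolchain carrying this change and the `next_index` feature gate; the resume-after-a-partial-scan scenario is illustrative.

```rust
#![feature(next_index)]

// After breaking out of a partial scan, `next_index` reports the index the next
// `next()` call would yield, which is convenient for resuming or progress reporting.
fn main() {
    let items = ["a", "b", "c"];
    let mut it = items.iter().enumerate();

    while let Some((_i, s)) = it.next() {
        if *s == "b" {
            break;
        }
    }

    // "a" and "b" have been consumed, so the next item (if any) carries index 2.
    assert_eq!(it.next_index(), 2);
    println!("resume at index {}", it.next_index());
}
```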
@@ -1307,10 +1307,12 @@ mod prim_f16 {}
 // FIXME: Is there a better place to put this?
 ///
 /// | `target_arch` | Extra payloads possible on this platform |
-/// |---------------|---------|
-/// | `x86`, `x86_64`, `arm`, `aarch64`, `riscv32`, `riscv64` | None |
+/// |---------------|------------------------------------------|
+// Sorted alphabetically
+/// | `aarch64`, `arm`, `arm64ec`, `loongarch64`, `powerpc` (except when `target_abi = "spe"`), `powerpc64`, `riscv32`, `riscv64`, `s390x`, `x86`, `x86_64` | None |
+/// | `nvptx64` | All payloads |
 /// | `sparc`, `sparc64` | The all-one payload |
-/// | `wasm32`, `wasm64` | If all input NaNs are quiet with all-zero payload: None.<br> Otherwise: all possible payloads. |
+/// | `wasm32`, `wasm64` | If all input NaNs are quiet with all-zero payload: None.<br> Otherwise: all payloads. |
 ///
 /// For targets not in this table, all payloads are possible.
 ///
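Aside (editorial): for readers unfamiliar with the term, the "payload" in the table above lives in a NaN's mantissa bits. The hedged sketch below only inspects a NaN's bit pattern with stable APIs; which payloads arithmetic can actually produce on a given platform is exactly what the per-target table specifies.

```rust
// Inspect the bit layout of an f32 NaN: sign, exponent (all ones for NaN), and the
// mantissa bits that carry the quiet/signaling flag and the payload.
fn main() {
    let nan = f32::NAN;
    let bits = nan.to_bits();

    let sign = bits >> 31;
    let exponent = (bits >> 23) & 0xff; // all ones for NaN and infinity
    let mantissa = bits & 0x007f_ffff;  // quiet bit + payload live here

    assert_eq!(exponent, 0xff);
    assert_ne!(mantissa, 0, "a zero mantissa would be an infinity, not a NaN");
    println!("sign={sign} exponent={exponent:#x} mantissa={mantissa:#x}");
}
```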
@@ -1739,3 +1739,11 @@ impl<T: ?Sized> PartialOrd for *const T {
         *self >= *other
     }
 }
+
+#[stable(feature = "raw_ptr_default", since = "CURRENT_RUSTC_VERSION")]
+impl<T: ?Sized + Thin> Default for *const T {
+    /// Returns the default value of [`null()`][crate::ptr::null].
+    fn default() -> Self {
+        crate::ptr::null()
+    }
+}
@@ -2156,3 +2156,11 @@ impl<T: ?Sized> PartialOrd for *mut T {
         *self >= *other
     }
 }
+
+#[stable(feature = "raw_ptr_default", since = "CURRENT_RUSTC_VERSION")]
+impl<T: ?Sized + Thin> Default for *mut T {
+    /// Returns the default value of [`null_mut()`][crate::ptr::null_mut].
+    fn default() -> Self {
+        crate::ptr::null_mut()
+    }
+}
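Aside (editorial): with these impls, `#[derive(Default)]` works on structs holding thin raw pointers, and the derived default is the null pointer. The snippet below assumes a toolchain that already includes the `raw_ptr_default` change from this diff; `Handles` is a made-up type for illustration.

```rust
// Structs with raw-pointer fields can now derive Default; the pointers start out null.
#[derive(Default)]
struct Handles {
    data: *const u8,
    scratch: *mut u8,
}

fn main() {
    let h = Handles::default();
    assert!(h.data.is_null());
    assert!(h.scratch.is_null());
    // The standalone impls behave like ptr::null / ptr::null_mut.
    assert_eq!(<*const i32 as Default>::default(), std::ptr::null::<i32>());
    println!("raw pointers default to null");
}
```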
@@ -2814,7 +2814,7 @@ impl<T> [T] {
         let half = size / 2;
         let mid = base + half;
 
-        // SAFETY: the call is made safe by the following inconstants:
+        // SAFETY: the call is made safe by the following invariants:
         // - `mid >= 0`: by definition
         // - `mid < size`: `mid = size / 2 + size / 4 + size / 8 ...`
         let cmp = f(unsafe { self.get_unchecked(mid) });
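Aside (editorial): the corrected SAFETY comment is about the invariant that `mid = base + size / 2` stays inside the slice. The safe sketch below, a hypothetical `lower_bound` helper that uses ordinary indexing instead of `get_unchecked`, follows the same halving scheme and shows why the index cannot go out of bounds: `base + size` never grows past the slice length.

```rust
// Returns the index of the last element <= needle (0 if every element is greater).
fn lower_bound(haystack: &[i32], needle: i32) -> usize {
    let mut base = 0usize;
    let mut size = haystack.len();
    while size > 1 {
        let half = size / 2;
        let mid = base + half;
        // In-bounds: base + size == haystack.len() is preserved by every step,
        // and half < size, so mid < haystack.len().
        if haystack[mid] <= needle {
            base = mid;
        }
        size -= half;
    }
    base
}

fn main() {
    let v = [1, 3, 5, 7, 9];
    assert_eq!(lower_bound(&v, 5), 2);
    assert_eq!(lower_bound(&v, 6), 2);
    println!("ok");
}
```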
@@ -120,3 +120,13 @@ fn test_double_ended_enumerate() {
     assert_eq!(it.next_back(), Some((2, 3)));
     assert_eq!(it.next(), None);
 }
+
+#[test]
+fn test_empty_iterator_enumerate_next_index() {
+    let mut it = empty::<i32>().enumerate();
+    assert_eq!(it.next_index(), 0);
+    assert_eq!(it.next_index(), 0);
+    assert_eq!(it.next(), None);
+    assert_eq!(it.next_index(), 0);
+    assert_eq!(it.next_index(), 0);
+}
@@ -63,6 +63,7 @@
 #![feature(maybe_uninit_write_slice)]
 #![feature(min_specialization)]
 #![feature(never_type)]
+#![feature(next_index)]
 #![feature(numfmt)]
 #![feature(pattern)]
 #![feature(pointer_is_aligned_to)]
@@ -1020,3 +1020,20 @@ fn test_ptr_swap_nonoverlapping_is_untyped() {
     ptr_swap_nonoverlapping_is_untyped_inner();
     const { ptr_swap_nonoverlapping_is_untyped_inner() };
 }
+
+#[test]
+fn test_ptr_default() {
+    #[derive(Default)]
+    struct PtrDefaultTest {
+        ptr: *const u64,
+    }
+    let default = PtrDefaultTest::default();
+    assert!(default.ptr.is_null());
+
+    #[derive(Default)]
+    struct PtrMutDefaultTest {
+        ptr: *mut u64,
+    }
+    let default = PtrMutDefaultTest::default();
+    assert!(default.ptr.is_null());
+}
@@ -950,7 +950,7 @@ impl fmt::Debug for ArgsOs {
 /// Constants associated with the current target
 #[stable(feature = "env", since = "1.0.0")]
 pub mod consts {
-    use crate::sys::env::os;
+    use crate::sys::env_consts::os;
 
     /// A string describing the architecture of the CPU that is currently in use.
     /// An example value may be: `"x86"`, `"arm"` or `"riscv64"`.
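Aside (editorial): the constants re-exported through `std::env::consts` are stable and describe the compile-time target rather than the machine the binary happens to run on. A small usage example (illustrative program, not taken from the diff):

```rust
fn main() {
    use std::env::consts;

    println!("arch: {}", consts::ARCH);
    println!("os: {}", consts::OS);
    println!("family: {}", consts::FAMILY);

    // Build platform-appropriate file names, e.g. "tool" vs "tool.exe".
    let exe_name = format!("tool{}", consts::EXE_SUFFIX);
    let lib_name = format!("{}mylib{}", consts::DLL_PREFIX, consts::DLL_SUFFIX);
    println!("{exe_name} / {lib_name}");
}
```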
@@ -1,106 +1,35 @@
-#[cfg(target_os = "linux")]
+//! Constants associated with each target.
+
+// Replaces the #[else] gate with #[cfg(not(any(…)))] of all the other gates.
+// This ensures that they must be mutually exclusive and do not have precedence
+// like cfg_if!.
+macro cfg_unordered(
+    $(#[cfg($cfg:meta)] $os:item)*
+    #[else] $fallback:item
+) {
+    $(#[cfg($cfg)] $os)*
+    #[cfg(not(any($($cfg),*)))] $fallback
+}
+
+// Keep entries sorted alphabetically and mutually exclusive.
+
+cfg_unordered! {
+
+#[cfg(target_os = "aix")]
 pub mod os {
     pub const FAMILY: &str = "unix";
-    pub const OS: &str = "linux";
+    pub const OS: &str = "aix";
     pub const DLL_PREFIX: &str = "lib";
-    pub const DLL_SUFFIX: &str = ".so";
-    pub const DLL_EXTENSION: &str = "so";
+    pub const DLL_SUFFIX: &str = ".a";
+    pub const DLL_EXTENSION: &str = "a";
     pub const EXE_SUFFIX: &str = "";
     pub const EXE_EXTENSION: &str = "";
 }
 
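Aside (editorial): based on the macro body shown above, a `cfg_unordered!` invocation with two gates expands to roughly the following: every gated item keeps its own `#[cfg]`, and the `#[else]` item becomes `#[cfg(not(any(...)))]` over all gates, so exactly one module can be compiled regardless of entry order. The `"fallback"` string is an illustrative stand-in for whatever the `#[else]` module defines.

```rust
// Manual expansion sketch of `cfg_unordered!` with two gates plus the #[else] arm.
#[cfg(target_os = "aix")]
pub mod os {
    pub const OS: &str = "aix";
}

#[cfg(target_os = "android")]
pub mod os {
    pub const OS: &str = "android";
}

// The #[else] arm: active only when none of the listed gates matched.
#[cfg(not(any(target_os = "aix", target_os = "android")))]
pub mod os {
    pub const OS: &str = "fallback";
}

fn main() {
    // Exactly one of the modules above exists for any given target.
    println!("selected: {}", os::OS);
}
```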
#[cfg(target_os = "macos")]
|
#[cfg(target_os = "android")]
|
||||||
pub mod os {
|
pub mod os {
|
||||||
pub const FAMILY: &str = "unix";
|
pub const FAMILY: &str = "unix";
|
||||||
pub const OS: &str = "macos";
|
pub const OS: &str = "android";
|
||||||
pub const DLL_PREFIX: &str = "lib";
|
|
||||||
pub const DLL_SUFFIX: &str = ".dylib";
|
|
||||||
pub const DLL_EXTENSION: &str = "dylib";
|
|
||||||
pub const EXE_SUFFIX: &str = "";
|
|
||||||
pub const EXE_EXTENSION: &str = "";
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(target_os = "ios")]
|
|
||||||
pub mod os {
|
|
||||||
pub const FAMILY: &str = "unix";
|
|
||||||
pub const OS: &str = "ios";
|
|
||||||
pub const DLL_PREFIX: &str = "lib";
|
|
||||||
pub const DLL_SUFFIX: &str = ".dylib";
|
|
||||||
pub const DLL_EXTENSION: &str = "dylib";
|
|
||||||
pub const EXE_SUFFIX: &str = "";
|
|
||||||
pub const EXE_EXTENSION: &str = "";
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(target_os = "tvos")]
|
|
||||||
pub mod os {
|
|
||||||
pub const FAMILY: &str = "unix";
|
|
||||||
pub const OS: &str = "tvos";
|
|
||||||
pub const DLL_PREFIX: &str = "lib";
|
|
||||||
pub const DLL_SUFFIX: &str = ".dylib";
|
|
||||||
pub const DLL_EXTENSION: &str = "dylib";
|
|
||||||
pub const EXE_SUFFIX: &str = "";
|
|
||||||
pub const EXE_EXTENSION: &str = "";
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(target_os = "watchos")]
|
|
||||||
pub mod os {
|
|
||||||
pub const FAMILY: &str = "unix";
|
|
||||||
pub const OS: &str = "watchos";
|
|
||||||
pub const DLL_PREFIX: &str = "lib";
|
|
||||||
pub const DLL_SUFFIX: &str = ".dylib";
|
|
||||||
pub const DLL_EXTENSION: &str = "dylib";
|
|
||||||
pub const EXE_SUFFIX: &str = "";
|
|
||||||
pub const EXE_EXTENSION: &str = "";
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(target_os = "visionos")]
|
|
||||||
pub mod os {
|
|
||||||
pub const FAMILY: &str = "unix";
|
|
||||||
pub const OS: &str = "visionos";
|
|
||||||
pub const DLL_PREFIX: &str = "lib";
|
|
||||||
pub const DLL_SUFFIX: &str = ".dylib";
|
|
||||||
pub const DLL_EXTENSION: &str = "dylib";
|
|
||||||
pub const EXE_SUFFIX: &str = "";
|
|
||||||
pub const EXE_EXTENSION: &str = "";
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(target_os = "freebsd")]
|
|
||||||
pub mod os {
|
|
||||||
pub const FAMILY: &str = "unix";
|
|
||||||
pub const OS: &str = "freebsd";
|
|
||||||
pub const DLL_PREFIX: &str = "lib";
|
|
||||||
pub const DLL_SUFFIX: &str = ".so";
|
|
||||||
pub const DLL_EXTENSION: &str = "so";
|
|
||||||
pub const EXE_SUFFIX: &str = "";
|
|
||||||
pub const EXE_EXTENSION: &str = "";
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(target_os = "dragonfly")]
|
|
||||||
pub mod os {
|
|
||||||
pub const FAMILY: &str = "unix";
|
|
||||||
pub const OS: &str = "dragonfly";
|
|
||||||
pub const DLL_PREFIX: &str = "lib";
|
|
||||||
pub const DLL_SUFFIX: &str = ".so";
|
|
||||||
pub const DLL_EXTENSION: &str = "so";
|
|
||||||
pub const EXE_SUFFIX: &str = "";
|
|
||||||
pub const EXE_EXTENSION: &str = "";
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(target_os = "netbsd")]
|
|
||||||
pub mod os {
|
|
||||||
pub const FAMILY: &str = "unix";
|
|
||||||
pub const OS: &str = "netbsd";
|
|
||||||
pub const DLL_PREFIX: &str = "lib";
|
|
||||||
pub const DLL_SUFFIX: &str = ".so";
|
|
||||||
pub const DLL_EXTENSION: &str = "so";
|
|
||||||
pub const EXE_SUFFIX: &str = "";
|
|
||||||
pub const EXE_EXTENSION: &str = "";
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(target_os = "openbsd")]
|
|
||||||
pub mod os {
|
|
||||||
pub const FAMILY: &str = "unix";
|
|
||||||
pub const OS: &str = "openbsd";
|
|
||||||
pub const DLL_PREFIX: &str = "lib";
|
pub const DLL_PREFIX: &str = "lib";
|
||||||
pub const DLL_SUFFIX: &str = ".so";
|
pub const DLL_SUFFIX: &str = ".so";
|
||||||
pub const DLL_EXTENSION: &str = "so";
|
pub const DLL_EXTENSION: &str = "so";
|
||||||
|
@ -119,10 +48,10 @@ pub mod os {
|
||||||
pub const EXE_EXTENSION: &str = "exe";
|
pub const EXE_EXTENSION: &str = "exe";
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(target_os = "android")]
|
#[cfg(target_os = "dragonfly")]
|
||||||
pub mod os {
|
pub mod os {
|
||||||
pub const FAMILY: &str = "unix";
|
pub const FAMILY: &str = "unix";
|
||||||
pub const OS: &str = "android";
|
pub const OS: &str = "dragonfly";
|
||||||
pub const DLL_PREFIX: &str = "lib";
|
pub const DLL_PREFIX: &str = "lib";
|
||||||
pub const DLL_SUFFIX: &str = ".so";
|
pub const DLL_SUFFIX: &str = ".so";
|
||||||
pub const DLL_EXTENSION: &str = "so";
|
pub const DLL_EXTENSION: &str = "so";
|
||||||
|
@ -130,10 +59,21 @@ pub mod os {
|
||||||
pub const EXE_EXTENSION: &str = "";
|
pub const EXE_EXTENSION: &str = "";
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(target_os = "solaris")]
|
#[cfg(target_os = "emscripten")]
|
||||||
pub mod os {
|
pub mod os {
|
||||||
pub const FAMILY: &str = "unix";
|
pub const FAMILY: &str = "unix";
|
||||||
pub const OS: &str = "solaris";
|
pub const OS: &str = "emscripten";
|
||||||
|
pub const DLL_PREFIX: &str = "lib";
|
||||||
|
pub const DLL_SUFFIX: &str = ".so";
|
||||||
|
pub const DLL_EXTENSION: &str = "so";
|
||||||
|
pub const EXE_SUFFIX: &str = ".js";
|
||||||
|
pub const EXE_EXTENSION: &str = "js";
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(target_os = "espidf")]
|
||||||
|
pub mod os {
|
||||||
|
pub const FAMILY: &str = "unix";
|
||||||
|
pub const OS: &str = "espidf";
|
||||||
pub const DLL_PREFIX: &str = "lib";
|
pub const DLL_PREFIX: &str = "lib";
|
||||||
pub const DLL_SUFFIX: &str = ".so";
|
pub const DLL_SUFFIX: &str = ".so";
|
||||||
pub const DLL_EXTENSION: &str = "so";
|
pub const DLL_EXTENSION: &str = "so";
|
||||||
|
@ -141,10 +81,21 @@ pub mod os {
|
||||||
pub const EXE_EXTENSION: &str = "";
|
pub const EXE_EXTENSION: &str = "";
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(target_os = "illumos")]
|
#[cfg(target_os = "freebsd")]
|
||||||
pub mod os {
|
pub mod os {
|
||||||
pub const FAMILY: &str = "unix";
|
pub const FAMILY: &str = "unix";
|
||||||
pub const OS: &str = "illumos";
|
pub const OS: &str = "freebsd";
|
||||||
|
pub const DLL_PREFIX: &str = "lib";
|
||||||
|
pub const DLL_SUFFIX: &str = ".so";
|
||||||
|
pub const DLL_EXTENSION: &str = "so";
|
||||||
|
pub const EXE_SUFFIX: &str = "";
|
||||||
|
pub const EXE_EXTENSION: &str = "";
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(target_os = "fuchsia")]
|
||||||
|
pub mod os {
|
||||||
|
pub const FAMILY: &str = "unix";
|
||||||
|
pub const OS: &str = "fuchsia";
|
||||||
pub const DLL_PREFIX: &str = "lib";
|
pub const DLL_PREFIX: &str = "lib";
|
||||||
pub const DLL_SUFFIX: &str = ".so";
|
pub const DLL_SUFFIX: &str = ".so";
|
||||||
pub const DLL_EXTENSION: &str = "so";
|
pub const DLL_EXTENSION: &str = "so";
|
||||||
|
@ -163,6 +114,17 @@ pub mod os {
|
||||||
pub const EXE_EXTENSION: &str = "";
|
pub const EXE_EXTENSION: &str = "";
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(target_os = "hermit")]
|
||||||
|
pub mod os {
|
||||||
|
pub const FAMILY: &str = "";
|
||||||
|
pub const OS: &str = "hermit";
|
||||||
|
pub const DLL_PREFIX: &str = "";
|
||||||
|
pub const DLL_SUFFIX: &str = "";
|
||||||
|
pub const DLL_EXTENSION: &str = "";
|
||||||
|
pub const EXE_SUFFIX: &str = "";
|
||||||
|
pub const EXE_EXTENSION: &str = "";
|
||||||
|
}
|
||||||
|
|
||||||
#[cfg(target_os = "horizon")]
|
#[cfg(target_os = "horizon")]
|
||||||
pub mod os {
|
pub mod os {
|
||||||
pub const FAMILY: &str = "unix";
|
pub const FAMILY: &str = "unix";
|
||||||
|
@ -185,35 +147,24 @@ pub mod os {
|
||||||
pub const EXE_EXTENSION: &str = "";
|
pub const EXE_EXTENSION: &str = "";
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(target_os = "vita")]
|
#[cfg(target_os = "illumos")]
|
||||||
pub mod os {
|
pub mod os {
|
||||||
pub const FAMILY: &str = "unix";
|
pub const FAMILY: &str = "unix";
|
||||||
pub const OS: &str = "vita";
|
pub const OS: &str = "illumos";
|
||||||
pub const DLL_PREFIX: &str = "lib";
|
pub const DLL_PREFIX: &str = "lib";
|
||||||
pub const DLL_SUFFIX: &str = ".so";
|
pub const DLL_SUFFIX: &str = ".so";
|
||||||
pub const DLL_EXTENSION: &str = "so";
|
pub const DLL_EXTENSION: &str = "so";
|
||||||
pub const EXE_SUFFIX: &str = ".elf";
|
pub const EXE_SUFFIX: &str = "";
|
||||||
pub const EXE_EXTENSION: &str = "elf";
|
pub const EXE_EXTENSION: &str = "";
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(all(target_os = "emscripten", target_arch = "wasm32"))]
|
#[cfg(target_os = "ios")]
|
||||||
pub mod os {
|
pub mod os {
|
||||||
pub const FAMILY: &str = "unix";
|
pub const FAMILY: &str = "unix";
|
||||||
pub const OS: &str = "emscripten";
|
pub const OS: &str = "ios";
|
||||||
pub const DLL_PREFIX: &str = "lib";
|
pub const DLL_PREFIX: &str = "lib";
|
||||||
pub const DLL_SUFFIX: &str = ".so";
|
pub const DLL_SUFFIX: &str = ".dylib";
|
||||||
pub const DLL_EXTENSION: &str = "so";
|
pub const DLL_EXTENSION: &str = "dylib";
|
||||||
pub const EXE_SUFFIX: &str = ".js";
|
|
||||||
pub const EXE_EXTENSION: &str = "js";
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(target_os = "fuchsia")]
|
|
||||||
pub mod os {
|
|
||||||
pub const FAMILY: &str = "unix";
|
|
||||||
pub const OS: &str = "fuchsia";
|
|
||||||
pub const DLL_PREFIX: &str = "lib";
|
|
||||||
pub const DLL_SUFFIX: &str = ".so";
|
|
||||||
pub const DLL_EXTENSION: &str = "so";
|
|
||||||
pub const EXE_SUFFIX: &str = "";
|
pub const EXE_SUFFIX: &str = "";
|
||||||
pub const EXE_EXTENSION: &str = "";
|
pub const EXE_EXTENSION: &str = "";
|
||||||
}
|
}
|
||||||
|
@ -229,6 +180,39 @@ pub mod os {
|
||||||
pub const EXE_EXTENSION: &str = "";
|
pub const EXE_EXTENSION: &str = "";
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(target_os = "linux")]
|
||||||
|
pub mod os {
|
||||||
|
pub const FAMILY: &str = "unix";
|
||||||
|
pub const OS: &str = "linux";
|
||||||
|
pub const DLL_PREFIX: &str = "lib";
|
||||||
|
pub const DLL_SUFFIX: &str = ".so";
|
||||||
|
pub const DLL_EXTENSION: &str = "so";
|
||||||
|
pub const EXE_SUFFIX: &str = "";
|
||||||
|
pub const EXE_EXTENSION: &str = "";
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(target_os = "macos")]
|
||||||
|
pub mod os {
|
||||||
|
pub const FAMILY: &str = "unix";
|
||||||
|
pub const OS: &str = "macos";
|
||||||
|
pub const DLL_PREFIX: &str = "lib";
|
||||||
|
pub const DLL_SUFFIX: &str = ".dylib";
|
||||||
|
pub const DLL_EXTENSION: &str = "dylib";
|
||||||
|
pub const EXE_SUFFIX: &str = "";
|
||||||
|
pub const EXE_EXTENSION: &str = "";
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(target_os = "netbsd")]
|
||||||
|
pub mod os {
|
||||||
|
pub const FAMILY: &str = "unix";
|
||||||
|
pub const OS: &str = "netbsd";
|
||||||
|
pub const DLL_PREFIX: &str = "lib";
|
||||||
|
pub const DLL_SUFFIX: &str = ".so";
|
||||||
|
pub const DLL_EXTENSION: &str = "so";
|
||||||
|
pub const EXE_SUFFIX: &str = "";
|
||||||
|
pub const EXE_EXTENSION: &str = "";
|
||||||
|
}
|
||||||
|
|
||||||
#[cfg(target_os = "nto")]
|
#[cfg(target_os = "nto")]
|
||||||
pub mod os {
|
pub mod os {
|
||||||
pub const FAMILY: &str = "unix";
|
pub const FAMILY: &str = "unix";
|
||||||
|
@ -240,6 +224,28 @@ pub mod os {
|
||||||
pub const EXE_EXTENSION: &str = "";
|
pub const EXE_EXTENSION: &str = "";
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(target_os = "nuttx")]
|
||||||
|
pub mod os {
|
||||||
|
pub const FAMILY: &str = "unix";
|
||||||
|
pub const OS: &str = "nuttx";
|
||||||
|
pub const DLL_PREFIX: &str = "lib";
|
||||||
|
pub const DLL_SUFFIX: &str = ".so";
|
||||||
|
pub const DLL_EXTENSION: &str = "so";
|
||||||
|
pub const EXE_SUFFIX: &str = "";
|
||||||
|
pub const EXE_EXTENSION: &str = "";
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(target_os = "openbsd")]
|
||||||
|
pub mod os {
|
||||||
|
pub const FAMILY: &str = "unix";
|
||||||
|
pub const OS: &str = "openbsd";
|
||||||
|
pub const DLL_PREFIX: &str = "lib";
|
||||||
|
pub const DLL_SUFFIX: &str = ".so";
|
||||||
|
pub const DLL_EXTENSION: &str = "so";
|
||||||
|
pub const EXE_SUFFIX: &str = "";
|
||||||
|
pub const EXE_EXTENSION: &str = "";
|
||||||
|
}
|
||||||
|
|
||||||
#[cfg(target_os = "redox")]
|
#[cfg(target_os = "redox")]
|
||||||
pub mod os {
|
pub mod os {
|
||||||
pub const FAMILY: &str = "unix";
|
pub const FAMILY: &str = "unix";
|
||||||
|
@ -262,6 +268,83 @@ pub mod os {
|
||||||
pub const EXE_EXTENSION: &str = "";
|
pub const EXE_EXTENSION: &str = "";
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(all(target_vendor = "fortanix", target_env = "sgx"))]
|
||||||
|
pub mod os {
|
||||||
|
pub const FAMILY: &str = "";
|
||||||
|
pub const OS: &str = "";
|
||||||
|
pub const DLL_PREFIX: &str = "";
|
||||||
|
pub const DLL_SUFFIX: &str = ".sgxs";
|
||||||
|
pub const DLL_EXTENSION: &str = "sgxs";
|
||||||
|
pub const EXE_SUFFIX: &str = ".sgxs";
|
||||||
|
pub const EXE_EXTENSION: &str = "sgxs";
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(target_os = "solaris")]
|
||||||
|
pub mod os {
|
||||||
|
pub const FAMILY: &str = "unix";
|
||||||
|
pub const OS: &str = "solaris";
|
||||||
|
pub const DLL_PREFIX: &str = "lib";
|
||||||
|
pub const DLL_SUFFIX: &str = ".so";
|
||||||
|
pub const DLL_EXTENSION: &str = "so";
|
||||||
|
pub const EXE_SUFFIX: &str = "";
|
||||||
|
pub const EXE_EXTENSION: &str = "";
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(target_os = "solid_asp3")]
|
||||||
|
pub mod os {
|
||||||
|
pub const FAMILY: &str = "itron";
|
||||||
|
pub const OS: &str = "solid";
|
||||||
|
pub const DLL_PREFIX: &str = "";
|
||||||
|
pub const DLL_SUFFIX: &str = ".so";
|
||||||
|
pub const DLL_EXTENSION: &str = "so";
|
||||||
|
pub const EXE_SUFFIX: &str = "";
|
||||||
|
pub const EXE_EXTENSION: &str = "";
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(target_os = "tvos")]
|
||||||
|
pub mod os {
|
||||||
|
pub const FAMILY: &str = "unix";
|
||||||
|
pub const OS: &str = "tvos";
|
||||||
|
pub const DLL_PREFIX: &str = "lib";
|
||||||
|
pub const DLL_SUFFIX: &str = ".dylib";
|
||||||
|
pub const DLL_EXTENSION: &str = "dylib";
|
||||||
|
pub const EXE_SUFFIX: &str = "";
|
||||||
|
pub const EXE_EXTENSION: &str = "";
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(target_os = "uefi")]
|
||||||
|
pub mod os {
|
||||||
|
pub const FAMILY: &str = "";
|
||||||
|
pub const OS: &str = "uefi";
|
||||||
|
pub const DLL_PREFIX: &str = "";
|
||||||
|
pub const DLL_SUFFIX: &str = "";
|
||||||
|
pub const DLL_EXTENSION: &str = "";
|
||||||
|
pub const EXE_SUFFIX: &str = ".efi";
|
||||||
|
pub const EXE_EXTENSION: &str = "efi";
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(target_os = "visionos")]
|
||||||
|
pub mod os {
|
||||||
|
pub const FAMILY: &str = "unix";
|
||||||
|
pub const OS: &str = "visionos";
|
||||||
|
pub const DLL_PREFIX: &str = "lib";
|
||||||
|
pub const DLL_SUFFIX: &str = ".dylib";
|
||||||
|
pub const DLL_EXTENSION: &str = "dylib";
|
||||||
|
pub const EXE_SUFFIX: &str = "";
|
||||||
|
pub const EXE_EXTENSION: &str = "";
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(target_os = "vita")]
|
||||||
|
pub mod os {
|
||||||
|
pub const FAMILY: &str = "unix";
|
||||||
|
pub const OS: &str = "vita";
|
||||||
|
pub const DLL_PREFIX: &str = "lib";
|
||||||
|
pub const DLL_SUFFIX: &str = ".so";
|
||||||
|
pub const DLL_EXTENSION: &str = "so";
|
||||||
|
pub const EXE_SUFFIX: &str = ".elf";
|
||||||
|
pub const EXE_EXTENSION: &str = "elf";
|
||||||
|
}
|
||||||
|
|
||||||
#[cfg(target_os = "vxworks")]
|
#[cfg(target_os = "vxworks")]
|
||||||
pub mod os {
|
pub mod os {
|
||||||
pub const FAMILY: &str = "unix";
|
pub const FAMILY: &str = "unix";
|
||||||
|
@ -273,35 +356,49 @@ pub mod os {
|
||||||
pub const EXE_EXTENSION: &str = "";
|
pub const EXE_EXTENSION: &str = "";
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(target_os = "espidf")]
|
#[cfg(all(target_family = "wasm", not(any(target_os = "emscripten", target_os = "linux"))))]
|
||||||
|
pub mod os {
|
||||||
|
pub const FAMILY: &str = "";
|
||||||
|
pub const OS: &str = "";
|
||||||
|
pub const DLL_PREFIX: &str = "";
|
||||||
|
pub const DLL_SUFFIX: &str = ".wasm";
|
||||||
|
pub const DLL_EXTENSION: &str = "wasm";
|
||||||
|
pub const EXE_SUFFIX: &str = ".wasm";
|
||||||
|
pub const EXE_EXTENSION: &str = "wasm";
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(target_os = "watchos")]
|
||||||
pub mod os {
|
pub mod os {
|
||||||
pub const FAMILY: &str = "unix";
|
pub const FAMILY: &str = "unix";
|
||||||
pub const OS: &str = "espidf";
|
pub const OS: &str = "watchos";
|
||||||
pub const DLL_PREFIX: &str = "lib";
|
pub const DLL_PREFIX: &str = "lib";
|
||||||
pub const DLL_SUFFIX: &str = ".so";
|
pub const DLL_SUFFIX: &str = ".dylib";
|
||||||
pub const DLL_EXTENSION: &str = "so";
|
pub const DLL_EXTENSION: &str = "dylib";
|
||||||
pub const EXE_SUFFIX: &str = "";
|
pub const EXE_SUFFIX: &str = "";
|
||||||
pub const EXE_EXTENSION: &str = "";
|
pub const EXE_EXTENSION: &str = "";
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(target_os = "aix")]
|
#[cfg(target_os = "windows")]
|
||||||
pub mod os {
|
pub mod os {
|
||||||
pub const FAMILY: &str = "unix";
|
pub const FAMILY: &str = "windows";
|
||||||
pub const OS: &str = "aix";
|
pub const OS: &str = "windows";
|
||||||
pub const DLL_PREFIX: &str = "lib";
|
pub const DLL_PREFIX: &str = "";
|
||||||
pub const DLL_SUFFIX: &str = ".a";
|
pub const DLL_SUFFIX: &str = ".dll";
|
||||||
pub const DLL_EXTENSION: &str = "a";
|
pub const DLL_EXTENSION: &str = "dll";
|
||||||
|
pub const EXE_SUFFIX: &str = ".exe";
|
||||||
|
pub const EXE_EXTENSION: &str = "exe";
|
||||||
|
}
|
||||||
|
|
||||||
|
// The fallback when none of the other gates match.
|
||||||
|
#[else]
|
||||||
|
pub mod os {
|
||||||
|
pub const FAMILY: &str = "";
|
||||||
|
pub const OS: &str = "";
|
||||||
|
pub const DLL_PREFIX: &str = "";
|
||||||
|
pub const DLL_SUFFIX: &str = "";
|
||||||
|
pub const DLL_EXTENSION: &str = "";
|
||||||
pub const EXE_SUFFIX: &str = "";
|
pub const EXE_SUFFIX: &str = "";
|
||||||
pub const EXE_EXTENSION: &str = "";
|
pub const EXE_EXTENSION: &str = "";
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(target_os = "nuttx")]
|
|
||||||
pub mod os {
|
|
||||||
pub const FAMILY: &str = "unix";
|
|
||||||
pub const OS: &str = "nuttx";
|
|
||||||
pub const DLL_PREFIX: &str = "lib";
|
|
||||||
pub const DLL_SUFFIX: &str = ".so";
|
|
||||||
pub const DLL_EXTENSION: &str = "so";
|
|
||||||
pub const EXE_SUFFIX: &str = "";
|
|
||||||
pub const EXE_EXTENSION: &str = "";
|
|
||||||
}
|
}
|
|
@ -12,10 +12,11 @@ use libc::c_char;
|
||||||
all(target_os = "linux", not(target_env = "musl")),
|
all(target_os = "linux", not(target_env = "musl")),
|
||||||
target_os = "android",
|
target_os = "android",
|
||||||
target_os = "fuchsia",
|
target_os = "fuchsia",
|
||||||
target_os = "hurd"
|
target_os = "hurd",
|
||||||
|
target_os = "illumos",
|
||||||
))]
|
))]
|
||||||
use libc::dirfd;
|
use libc::dirfd;
|
||||||
#[cfg(target_os = "fuchsia")]
|
#[cfg(any(target_os = "fuchsia", target_os = "illumos"))]
|
||||||
use libc::fstatat as fstatat64;
|
use libc::fstatat as fstatat64;
|
||||||
#[cfg(any(all(target_os = "linux", not(target_env = "musl")), target_os = "hurd"))]
|
#[cfg(any(all(target_os = "linux", not(target_env = "musl")), target_os = "hurd"))]
|
||||||
use libc::fstatat64;
|
use libc::fstatat64;
|
||||||
|
@ -892,7 +893,8 @@ impl DirEntry {
|
||||||
all(target_os = "linux", not(target_env = "musl")),
|
all(target_os = "linux", not(target_env = "musl")),
|
||||||
target_os = "android",
|
target_os = "android",
|
||||||
target_os = "fuchsia",
|
target_os = "fuchsia",
|
||||||
target_os = "hurd"
|
target_os = "hurd",
|
||||||
|
target_os = "illumos",
|
||||||
),
|
),
|
||||||
not(miri) // no dirfd on Miri
|
not(miri) // no dirfd on Miri
|
||||||
))]
|
))]
|
||||||
|
@ -922,6 +924,7 @@ impl DirEntry {
|
||||||
target_os = "android",
|
target_os = "android",
|
||||||
target_os = "fuchsia",
|
target_os = "fuchsia",
|
||||||
target_os = "hurd",
|
target_os = "hurd",
|
||||||
|
target_os = "illumos",
|
||||||
)),
|
)),
|
||||||
miri
|
miri
|
||||||
))]
|
))]
|
||||||
|
|
|
@ -12,6 +12,7 @@ pub mod anonymous_pipe;
|
||||||
pub mod args;
|
pub mod args;
|
||||||
pub mod backtrace;
|
pub mod backtrace;
|
||||||
pub mod cmath;
|
pub mod cmath;
|
||||||
|
pub mod env_consts;
|
||||||
pub mod exit_guard;
|
pub mod exit_guard;
|
||||||
pub mod fd;
|
pub mod fd;
|
||||||
pub mod fs;
|
pub mod fs;
|
||||||
|
|
|
@ -1,9 +0,0 @@
|
||||||
pub mod os {
|
|
||||||
pub const FAMILY: &str = "";
|
|
||||||
pub const OS: &str = "hermit";
|
|
||||||
pub const DLL_PREFIX: &str = "";
|
|
||||||
pub const DLL_SUFFIX: &str = "";
|
|
||||||
pub const DLL_EXTENSION: &str = "";
|
|
||||||
pub const EXE_SUFFIX: &str = "";
|
|
||||||
pub const EXE_EXTENSION: &str = "";
|
|
||||||
}
|
|
|
@ -18,7 +18,6 @@
|
||||||
|
|
||||||
use crate::os::raw::c_char;
|
use crate::os::raw::c_char;
|
||||||
|
|
||||||
pub mod env;
|
|
||||||
pub mod futex;
|
pub mod futex;
|
||||||
pub mod os;
|
pub mod os;
|
||||||
#[path = "../unsupported/pipe.rs"]
|
#[path = "../unsupported/pipe.rs"]
|
||||||
|
|
|
@ -1,9 +0,0 @@
|
||||||
pub mod os {
|
|
||||||
pub const FAMILY: &str = "";
|
|
||||||
pub const OS: &str = "";
|
|
||||||
pub const DLL_PREFIX: &str = "";
|
|
||||||
pub const DLL_SUFFIX: &str = ".sgxs";
|
|
||||||
pub const DLL_EXTENSION: &str = "sgxs";
|
|
||||||
pub const EXE_SUFFIX: &str = ".sgxs";
|
|
||||||
pub const EXE_EXTENSION: &str = "sgxs";
|
|
||||||
}
|
|
|
@ -9,7 +9,6 @@ use crate::io::ErrorKind;
|
||||||
use crate::sync::atomic::{AtomicBool, Ordering};
|
use crate::sync::atomic::{AtomicBool, Ordering};
|
||||||
|
|
||||||
pub mod abi;
|
pub mod abi;
|
||||||
pub mod env;
|
|
||||||
mod libunwind_integration;
|
mod libunwind_integration;
|
||||||
pub mod os;
|
pub mod os;
|
||||||
#[path = "../unsupported/pipe.rs"]
|
#[path = "../unsupported/pipe.rs"]
|
||||||
|
|
|
@ -1,9 +0,0 @@
|
||||||
pub mod os {
|
|
||||||
pub const FAMILY: &str = "itron";
|
|
||||||
pub const OS: &str = "solid";
|
|
||||||
pub const DLL_PREFIX: &str = "";
|
|
||||||
pub const DLL_SUFFIX: &str = ".so";
|
|
||||||
pub const DLL_EXTENSION: &str = "so";
|
|
||||||
pub const EXE_SUFFIX: &str = "";
|
|
||||||
pub const EXE_EXTENSION: &str = "";
|
|
||||||
}
|
|
|
@ -16,7 +16,6 @@ pub mod itron {
|
||||||
use super::unsupported;
|
use super::unsupported;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub mod env;
|
|
||||||
// `error` is `pub(crate)` so that it can be accessed by `itron/error.rs` as
|
// `error` is `pub(crate)` so that it can be accessed by `itron/error.rs` as
|
||||||
// `crate::sys::error`
|
// `crate::sys::error`
|
||||||
pub(crate) mod error;
|
pub(crate) mod error;
|
||||||
|
|
|
@ -6,9 +6,6 @@
|
||||||
#![allow(unused_variables)]
|
#![allow(unused_variables)]
|
||||||
#![allow(dead_code)]
|
#![allow(dead_code)]
|
||||||
|
|
||||||
#[path = "../unsupported/env.rs"]
|
|
||||||
pub mod env;
|
|
||||||
//pub mod fd;
|
|
||||||
pub mod os;
|
pub mod os;
|
||||||
#[path = "../unsupported/pipe.rs"]
|
#[path = "../unsupported/pipe.rs"]
|
||||||
pub mod pipe;
|
pub mod pipe;
|
||||||
|
|
|
@ -3,8 +3,6 @@
|
||||||
#[path = "../unsupported/common.rs"]
|
#[path = "../unsupported/common.rs"]
|
||||||
#[deny(unsafe_op_in_unsafe_fn)]
|
#[deny(unsafe_op_in_unsafe_fn)]
|
||||||
mod common;
|
mod common;
|
||||||
#[path = "../unsupported/env.rs"]
|
|
||||||
pub mod env;
|
|
||||||
#[path = "../unsupported/os.rs"]
|
#[path = "../unsupported/os.rs"]
|
||||||
pub mod os;
|
pub mod os;
|
||||||
#[path = "../unsupported/pipe.rs"]
|
#[path = "../unsupported/pipe.rs"]
|
||||||
|
|
|
@ -1,9 +0,0 @@
|
||||||
pub mod os {
|
|
||||||
pub const FAMILY: &str = "";
|
|
||||||
pub const OS: &str = "uefi";
|
|
||||||
pub const DLL_PREFIX: &str = "";
|
|
||||||
pub const DLL_SUFFIX: &str = "";
|
|
||||||
pub const DLL_EXTENSION: &str = "";
|
|
||||||
pub const EXE_SUFFIX: &str = ".efi";
|
|
||||||
pub const EXE_EXTENSION: &str = "efi";
|
|
||||||
}
|
|
|
@ -13,7 +13,6 @@
|
||||||
//! [`OsString`]: crate::ffi::OsString
|
//! [`OsString`]: crate::ffi::OsString
|
||||||
#![forbid(unsafe_op_in_unsafe_fn)]
|
#![forbid(unsafe_op_in_unsafe_fn)]
|
||||||
|
|
||||||
pub mod env;
|
|
||||||
pub mod helpers;
|
pub mod helpers;
|
||||||
pub mod os;
|
pub mod os;
|
||||||
#[path = "../unsupported/pipe.rs"]
|
#[path = "../unsupported/pipe.rs"]
|
||||||
|
|
|
@ -6,7 +6,6 @@ use crate::io::ErrorKind;
|
||||||
#[macro_use]
|
#[macro_use]
|
||||||
pub mod weak;
|
pub mod weak;
|
||||||
|
|
||||||
pub mod env;
|
|
||||||
#[cfg(target_os = "fuchsia")]
|
#[cfg(target_os = "fuchsia")]
|
||||||
pub mod fuchsia;
|
pub mod fuchsia;
|
||||||
pub mod futex;
|
pub mod futex;
|
||||||
|
|
|
@ -1,9 +0,0 @@
|
||||||
pub mod os {
|
|
||||||
pub const FAMILY: &str = "";
|
|
||||||
pub const OS: &str = "";
|
|
||||||
pub const DLL_PREFIX: &str = "";
|
|
||||||
pub const DLL_SUFFIX: &str = "";
|
|
||||||
pub const DLL_EXTENSION: &str = "";
|
|
||||||
pub const EXE_SUFFIX: &str = "";
|
|
||||||
pub const EXE_EXTENSION: &str = "";
|
|
||||||
}
|
|
|
@ -1,6 +1,5 @@
|
||||||
#![deny(unsafe_op_in_unsafe_fn)]
|
#![deny(unsafe_op_in_unsafe_fn)]
|
||||||
|
|
||||||
pub mod env;
|
|
||||||
pub mod os;
|
pub mod os;
|
||||||
pub mod pipe;
|
pub mod pipe;
|
||||||
pub mod thread;
|
pub mod thread;
|
||||||
|
|
|
@ -1,11 +0,0 @@
|
||||||
#![forbid(unsafe_op_in_unsafe_fn)]
|
|
||||||
|
|
||||||
pub mod os {
|
|
||||||
pub const FAMILY: &str = "";
|
|
||||||
pub const OS: &str = "";
|
|
||||||
pub const DLL_PREFIX: &str = "";
|
|
||||||
pub const DLL_SUFFIX: &str = ".wasm";
|
|
||||||
pub const DLL_EXTENSION: &str = "wasm";
|
|
||||||
pub const EXE_SUFFIX: &str = ".wasm";
|
|
||||||
pub const EXE_EXTENSION: &str = "wasm";
|
|
||||||
}
|
|
|
@ -13,7 +13,6 @@
|
||||||
//! compiling for wasm. That way it's a compile time error for something that's
|
//! compiling for wasm. That way it's a compile time error for something that's
|
||||||
//! guaranteed to be a runtime error!
|
//! guaranteed to be a runtime error!
|
||||||
|
|
||||||
pub mod env;
|
|
||||||
#[allow(unused)]
|
#[allow(unused)]
|
||||||
#[path = "../wasm/atomics/futex.rs"]
|
#[path = "../wasm/atomics/futex.rs"]
|
||||||
pub mod futex;
|
pub mod futex;
|
||||||
|
|
|
@ -6,8 +6,6 @@
|
||||||
//! To begin with, this target mirrors the wasi target 1 to 1, but over
|
//! To begin with, this target mirrors the wasi target 1 to 1, but over
|
||||||
//! time this will change significantly.
|
//! time this will change significantly.
|
||||||
|
|
||||||
#[path = "../wasi/env.rs"]
|
|
||||||
pub mod env;
|
|
||||||
#[allow(unused)]
|
#[allow(unused)]
|
||||||
#[path = "../wasm/atomics/futex.rs"]
|
#[path = "../wasm/atomics/futex.rs"]
|
||||||
pub mod futex;
|
pub mod futex;
|
||||||
|
|
|
@ -1,9 +0,0 @@
|
||||||
pub mod os {
|
|
||||||
pub const FAMILY: &str = "";
|
|
||||||
pub const OS: &str = "";
|
|
||||||
pub const DLL_PREFIX: &str = "";
|
|
||||||
pub const DLL_SUFFIX: &str = ".wasm";
|
|
||||||
pub const DLL_EXTENSION: &str = "wasm";
|
|
||||||
pub const EXE_SUFFIX: &str = ".wasm";
|
|
||||||
pub const EXE_EXTENSION: &str = "wasm";
|
|
||||||
}
|
|
|
@ -16,7 +16,6 @@
|
||||||
|
|
||||||
#![deny(unsafe_op_in_unsafe_fn)]
|
#![deny(unsafe_op_in_unsafe_fn)]
|
||||||
|
|
||||||
pub mod env;
|
|
||||||
#[path = "../unsupported/os.rs"]
|
#[path = "../unsupported/os.rs"]
|
||||||
pub mod os;
|
pub mod os;
|
||||||
#[path = "../unsupported/pipe.rs"]
|
#[path = "../unsupported/pipe.rs"]
|
||||||
|
|
|
@ -1,9 +0,0 @@
|
||||||
pub mod os {
|
|
||||||
pub const FAMILY: &str = "windows";
|
|
||||||
pub const OS: &str = "windows";
|
|
||||||
pub const DLL_PREFIX: &str = "";
|
|
||||||
pub const DLL_SUFFIX: &str = ".dll";
|
|
||||||
pub const DLL_EXTENSION: &str = "dll";
|
|
||||||
pub const EXE_SUFFIX: &str = ".exe";
|
|
||||||
pub const EXE_EXTENSION: &str = "exe";
|
|
||||||
}
|
|
|
@ -15,7 +15,6 @@ pub mod compat;
|
||||||
pub mod api;
|
pub mod api;
|
||||||
|
|
||||||
pub mod c;
|
pub mod c;
|
||||||
pub mod env;
|
|
||||||
#[cfg(not(target_vendor = "win7"))]
|
#[cfg(not(target_vendor = "win7"))]
|
||||||
pub mod futex;
|
pub mod futex;
|
||||||
pub mod handle;
|
pub mod handle;
|
||||||
|
|
|
@ -1,7 +1,5 @@
|
||||||
#![forbid(unsafe_op_in_unsafe_fn)]
|
#![forbid(unsafe_op_in_unsafe_fn)]
|
||||||
|
|
||||||
#[path = "../unsupported/env.rs"]
|
|
||||||
pub mod env;
|
|
||||||
pub mod os;
|
pub mod os;
|
||||||
#[path = "../unsupported/pipe.rs"]
|
#[path = "../unsupported/pipe.rs"]
|
||||||
pub mod pipe;
|
pub mod pipe;
|
||||||
|
|
|
@ -11,7 +11,6 @@
|
||||||
pub const WORD_SIZE: usize = size_of::<u32>();
|
pub const WORD_SIZE: usize = size_of::<u32>();
|
||||||
|
|
||||||
pub mod abi;
|
pub mod abi;
|
||||||
pub mod env;
|
|
||||||
pub mod os;
|
pub mod os;
|
||||||
#[path = "../unsupported/pipe.rs"]
|
#[path = "../unsupported/pipe.rs"]
|
||||||
pub mod pipe;
|
pub mod pipe;
|
||||||
|
|
|
@ -1194,8 +1194,7 @@ pub fn rustc_cargo(
|
||||||
let enzyme_dir = builder.build.out.join(arch).join("enzyme").join("lib");
|
let enzyme_dir = builder.build.out.join(arch).join("enzyme").join("lib");
|
||||||
cargo.rustflag("-L").rustflag(enzyme_dir.to_str().expect("Invalid path"));
|
cargo.rustflag("-L").rustflag(enzyme_dir.to_str().expect("Invalid path"));
|
||||||
|
|
||||||
if !builder.config.dry_run() {
|
if let Some(llvm_config) = builder.llvm_config(builder.config.build) {
|
||||||
let llvm_config = builder.llvm_config(builder.config.build).unwrap();
|
|
||||||
let llvm_version_major = llvm::get_llvm_version_major(builder, &llvm_config);
|
let llvm_version_major = llvm::get_llvm_version_major(builder, &llvm_config);
|
||||||
cargo.rustflag("-l").rustflag(&format!("Enzyme-{llvm_version_major}"));
|
cargo.rustflag("-l").rustflag(&format!("Enzyme-{llvm_version_major}"));
|
||||||
}
|
}
|
||||||
|
|
|
@ -584,6 +584,7 @@ Select which editor you would like to set up [default: None]: ";
|
||||||
"51068d4747a13732440d1a8b8f432603badb1864fa431d83d0fd4f8fa57039e0",
|
"51068d4747a13732440d1a8b8f432603badb1864fa431d83d0fd4f8fa57039e0",
|
||||||
"d29af4d949bbe2371eac928a3c31cf9496b1701aa1c45f11cd6c759865ad5c45",
|
"d29af4d949bbe2371eac928a3c31cf9496b1701aa1c45f11cd6c759865ad5c45",
|
||||||
"b5dd299b93dca3ceeb9b335f929293cb3d4bf4977866fbe7ceeac2a8a9f99088",
|
"b5dd299b93dca3ceeb9b335f929293cb3d4bf4977866fbe7ceeac2a8a9f99088",
|
||||||
|
"631c837b0e98ae35fd48b0e5f743b1ca60adadf2d0a2b23566ba25df372cf1a9",
|
||||||
],
|
],
|
||||||
EditorKind::Helix => &[
|
EditorKind::Helix => &[
|
||||||
"2d3069b8cf1b977e5d4023965eb6199597755e6c96c185ed5f2854f98b83d233",
|
"2d3069b8cf1b977e5d4023965eb6199597755e6c96c185ed5f2854f98b83d233",
|
||||||
|
@ -602,10 +603,12 @@ Select which editor you would like to set up [default: None]: ";
|
||||||
"4eecb58a2168b252077369da446c30ed0e658301efe69691979d1ef0443928f4",
|
"4eecb58a2168b252077369da446c30ed0e658301efe69691979d1ef0443928f4",
|
||||||
"c394386e6133bbf29ffd32c8af0bb3d4aac354cba9ee051f29612aa9350f8f8d",
|
"c394386e6133bbf29ffd32c8af0bb3d4aac354cba9ee051f29612aa9350f8f8d",
|
||||||
"e53e9129ca5ee5dcbd6ec8b68c2d87376474eb154992deba3c6d9ab1703e0717",
|
"e53e9129ca5ee5dcbd6ec8b68c2d87376474eb154992deba3c6d9ab1703e0717",
|
||||||
|
"f954316090936c7e590c253ca9d524008375882fa13c5b41d7e2547a896ff893",
|
||||||
],
|
],
|
||||||
EditorKind::Zed => &[
|
EditorKind::Zed => &[
|
||||||
"bbce727c269d1bd0c98afef4d612eb4ce27aea3c3a8968c5f10b31affbc40b6c",
|
"bbce727c269d1bd0c98afef4d612eb4ce27aea3c3a8968c5f10b31affbc40b6c",
|
||||||
"a5380cf5dd9328731aecc5dfb240d16dac46ed272126b9728006151ef42f5909",
|
"a5380cf5dd9328731aecc5dfb240d16dac46ed272126b9728006151ef42f5909",
|
||||||
|
"2e96bf0d443852b12f016c8fc9840ab3d0a2b4fe0b0fb3a157e8d74d5e7e0e26",
|
||||||
],
|
],
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -6,6 +6,7 @@
 use std::cell::{Cell, RefCell};
 use std::collections::{BTreeSet, HashMap, HashSet};
 use std::fmt::{self, Display};
+use std::hash::Hash;
 use std::io::IsTerminal;
 use std::path::{Path, PathBuf, absolute};
 use std::process::Command;
@@ -701,6 +702,7 @@ pub(crate) struct TomlConfig {
     target: Option<HashMap<String, TomlTarget>>,
     dist: Option<Dist>,
     profile: Option<String>,
+    include: Option<Vec<PathBuf>>,
 }
 
 /// This enum is used for deserializing change IDs from TOML, allowing both numeric values and the string `"ignore"`.
@@ -747,27 +749,35 @@ enum ReplaceOpt {
 }
 
 trait Merge {
-    fn merge(&mut self, other: Self, replace: ReplaceOpt);
+    fn merge(
+        &mut self,
+        parent_config_path: Option<PathBuf>,
+        included_extensions: &mut HashSet<PathBuf>,
+        other: Self,
+        replace: ReplaceOpt,
+    );
 }
 
 impl Merge for TomlConfig {
     fn merge(
         &mut self,
-        TomlConfig { build, install, llvm, gcc, rust, dist, target, profile, change_id }: Self,
+        parent_config_path: Option<PathBuf>,
+        included_extensions: &mut HashSet<PathBuf>,
+        TomlConfig { build, install, llvm, gcc, rust, dist, target, profile, change_id, include }: Self,
         replace: ReplaceOpt,
     ) {
         fn do_merge<T: Merge>(x: &mut Option<T>, y: Option<T>, replace: ReplaceOpt) {
             if let Some(new) = y {
                 if let Some(original) = x {
-                    original.merge(new, replace);
+                    original.merge(None, &mut Default::default(), new, replace);
                 } else {
                     *x = Some(new);
                 }
             }
         }
 
-        self.change_id.inner.merge(change_id.inner, replace);
-        self.profile.merge(profile, replace);
+        self.change_id.inner.merge(None, &mut Default::default(), change_id.inner, replace);
+        self.profile.merge(None, &mut Default::default(), profile, replace);
 
         do_merge(&mut self.build, build, replace);
         do_merge(&mut self.install, install, replace);
@@ -782,13 +792,50 @@ impl Merge for TomlConfig {
             (Some(original_target), Some(new_target)) => {
                 for (triple, new) in new_target {
                     if let Some(original) = original_target.get_mut(&triple) {
-                        original.merge(new, replace);
+                        original.merge(None, &mut Default::default(), new, replace);
                     } else {
                         original_target.insert(triple, new);
                     }
                 }
            }
        }
+
+        let parent_dir = parent_config_path
+            .as_ref()
+            .and_then(|p| p.parent().map(ToOwned::to_owned))
+            .unwrap_or_default();
+
+        // `include` handled later since we ignore duplicates using `ReplaceOpt::IgnoreDuplicate` to
+        // keep the upper-level configuration to take precedence.
+        for include_path in include.clone().unwrap_or_default().iter().rev() {
+            let include_path = parent_dir.join(include_path);
+            let include_path = include_path.canonicalize().unwrap_or_else(|e| {
+                eprintln!("ERROR: Failed to canonicalize '{}' path: {e}", include_path.display());
+                exit!(2);
+            });
+
+            let included_toml = Config::get_toml_inner(&include_path).unwrap_or_else(|e| {
+                eprintln!("ERROR: Failed to parse '{}': {e}", include_path.display());
+                exit!(2);
+            });
+
+            assert!(
+                included_extensions.insert(include_path.clone()),
+                "Cyclic inclusion detected: '{}' is being included again before its previous inclusion was fully processed.",
+                include_path.display()
+            );
+
+            self.merge(
+                Some(include_path.clone()),
+                included_extensions,
+                included_toml,
+                // Ensures that parent configuration always takes precedence
+                // over child configurations.
+                ReplaceOpt::IgnoreDuplicate,
+            );
+
+            included_extensions.remove(&include_path);
+        }
     }
 }
 
|
@ -803,7 +850,13 @@ macro_rules! define_config {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Merge for $name {
|
impl Merge for $name {
|
||||||
fn merge(&mut self, other: Self, replace: ReplaceOpt) {
|
fn merge(
|
||||||
|
&mut self,
|
||||||
|
_parent_config_path: Option<PathBuf>,
|
||||||
|
_included_extensions: &mut HashSet<PathBuf>,
|
||||||
|
other: Self,
|
||||||
|
replace: ReplaceOpt
|
||||||
|
) {
|
||||||
$(
|
$(
|
||||||
match replace {
|
match replace {
|
||||||
ReplaceOpt::IgnoreDuplicate => {
|
ReplaceOpt::IgnoreDuplicate => {
|
||||||
|
@ -903,7 +956,13 @@ macro_rules! define_config {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T> Merge for Option<T> {
|
impl<T> Merge for Option<T> {
|
||||||
fn merge(&mut self, other: Self, replace: ReplaceOpt) {
|
fn merge(
|
||||||
|
&mut self,
|
||||||
|
_parent_config_path: Option<PathBuf>,
|
||||||
|
_included_extensions: &mut HashSet<PathBuf>,
|
||||||
|
other: Self,
|
||||||
|
replace: ReplaceOpt,
|
||||||
|
) {
|
||||||
match replace {
|
match replace {
|
||||||
ReplaceOpt::IgnoreDuplicate => {
|
ReplaceOpt::IgnoreDuplicate => {
|
||||||
if self.is_none() {
|
if self.is_none() {
|
||||||
|
@ -1363,13 +1422,15 @@ impl Config {
|
||||||
Self::get_toml(&builder_config_path)
|
Self::get_toml(&builder_config_path)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub(crate) fn get_toml(file: &Path) -> Result<TomlConfig, toml::de::Error> {
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
pub(crate) fn get_toml(_: &Path) -> Result<TomlConfig, toml::de::Error> {
|
return Ok(TomlConfig::default());
|
||||||
Ok(TomlConfig::default())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(not(test))]
|
#[cfg(not(test))]
|
||||||
pub(crate) fn get_toml(file: &Path) -> Result<TomlConfig, toml::de::Error> {
|
Self::get_toml_inner(file)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_toml_inner(file: &Path) -> Result<TomlConfig, toml::de::Error> {
|
||||||
let contents =
|
let contents =
|
||||||
t!(fs::read_to_string(file), format!("config file {} not found", file.display()));
|
t!(fs::read_to_string(file), format!("config file {} not found", file.display()));
|
||||||
// Deserialize to Value and then TomlConfig to prevent the Deserialize impl of
|
// Deserialize to Value and then TomlConfig to prevent the Deserialize impl of
|
||||||
|
@ -1548,7 +1609,8 @@ impl Config {
|
||||||
// but not if `bootstrap.toml` hasn't been created.
|
// but not if `bootstrap.toml` hasn't been created.
|
||||||
let mut toml = if !using_default_path || toml_path.exists() {
|
let mut toml = if !using_default_path || toml_path.exists() {
|
||||||
config.config = Some(if cfg!(not(test)) {
|
config.config = Some(if cfg!(not(test)) {
|
||||||
toml_path.canonicalize().unwrap()
|
toml_path = toml_path.canonicalize().unwrap();
|
||||||
|
toml_path.clone()
|
||||||
} else {
|
} else {
|
||||||
toml_path.clone()
|
toml_path.clone()
|
||||||
});
|
});
|
||||||
|
@ -1576,6 +1638,26 @@ impl Config {
|
||||||
toml.profile = Some("dist".into());
|
toml.profile = Some("dist".into());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Reverse the list to ensure the last added config extension remains the most dominant.
|
||||||
|
// For example, given ["a.toml", "b.toml"], "b.toml" should take precedence over "a.toml".
|
||||||
|
//
|
||||||
|
// This must be handled before applying the `profile` since `include`s should always take
|
||||||
|
// precedence over `profile`s.
|
||||||
|
for include_path in toml.include.clone().unwrap_or_default().iter().rev() {
|
||||||
|
let include_path = toml_path.parent().unwrap().join(include_path);
|
||||||
|
|
||||||
|
let included_toml = get_toml(&include_path).unwrap_or_else(|e| {
|
||||||
|
eprintln!("ERROR: Failed to parse '{}': {e}", include_path.display());
|
||||||
|
exit!(2);
|
||||||
|
});
|
||||||
|
toml.merge(
|
||||||
|
Some(include_path),
|
||||||
|
&mut Default::default(),
|
||||||
|
included_toml,
|
||||||
|
ReplaceOpt::IgnoreDuplicate,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
if let Some(include) = &toml.profile {
|
if let Some(include) = &toml.profile {
|
||||||
// Allows creating alias for profile names, allowing
|
// Allows creating alias for profile names, allowing
|
||||||
// profiles to be renamed while maintaining back compatibility
|
// profiles to be renamed while maintaining back compatibility
|
||||||
|
@ -1597,7 +1679,12 @@ impl Config {
|
||||||
);
|
);
|
||||||
exit!(2);
|
exit!(2);
|
||||||
});
|
});
|
||||||
toml.merge(included_toml, ReplaceOpt::IgnoreDuplicate);
|
toml.merge(
|
||||||
|
Some(include_path),
|
||||||
|
&mut Default::default(),
|
||||||
|
included_toml,
|
||||||
|
ReplaceOpt::IgnoreDuplicate,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut override_toml = TomlConfig::default();
|
let mut override_toml = TomlConfig::default();
|
||||||
|
@ -1608,7 +1695,12 @@ impl Config {
|
||||||
|
|
||||||
let mut err = match get_table(option) {
|
let mut err = match get_table(option) {
|
||||||
Ok(v) => {
|
Ok(v) => {
|
||||||
override_toml.merge(v, ReplaceOpt::ErrorOnDuplicate);
|
override_toml.merge(
|
||||||
|
None,
|
||||||
|
&mut Default::default(),
|
||||||
|
v,
|
||||||
|
ReplaceOpt::ErrorOnDuplicate,
|
||||||
|
);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
Err(e) => e,
|
Err(e) => e,
|
||||||
|
@ -1619,7 +1711,12 @@ impl Config {
|
||||||
if !value.contains('"') {
|
if !value.contains('"') {
|
||||||
match get_table(&format!(r#"{key}="{value}""#)) {
|
match get_table(&format!(r#"{key}="{value}""#)) {
|
||||||
Ok(v) => {
|
Ok(v) => {
|
||||||
override_toml.merge(v, ReplaceOpt::ErrorOnDuplicate);
|
override_toml.merge(
|
||||||
|
None,
|
||||||
|
&mut Default::default(),
|
||||||
|
v,
|
||||||
|
ReplaceOpt::ErrorOnDuplicate,
|
||||||
|
);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
Err(e) => err = e,
|
Err(e) => err = e,
|
||||||
|
@ -1629,7 +1726,7 @@ impl Config {
|
||||||
eprintln!("failed to parse override `{option}`: `{err}");
|
eprintln!("failed to parse override `{option}`: `{err}");
|
||||||
exit!(2)
|
exit!(2)
|
||||||
}
|
}
|
||||||
toml.merge(override_toml, ReplaceOpt::Override);
|
toml.merge(None, &mut Default::default(), override_toml, ReplaceOpt::Override);
|
||||||
|
|
||||||
config.change_id = toml.change_id.inner;
|
config.change_id = toml.change_id.inner;
|
||||||
|
|
||||||
|
|
@@ -1,8 +1,8 @@
 use std::collections::BTreeSet;
-use std::env;
 use std::fs::{File, remove_file};
 use std::io::Write;
-use std::path::Path;
+use std::path::{Path, PathBuf};
+use std::{env, fs};

 use build_helper::ci::CiEnv;
 use clap::CommandFactory;
@@ -23,6 +23,27 @@ pub(crate) fn parse(config: &str) -> Config {
     )
 }

+fn get_toml(file: &Path) -> Result<TomlConfig, toml::de::Error> {
+    let contents = std::fs::read_to_string(file).unwrap();
+    toml::from_str(&contents).and_then(|table: toml::Value| TomlConfig::deserialize(table))
+}
+
+/// Helps with debugging by using consistent test-specific directories instead of
+/// random temporary directories.
+fn prepare_test_specific_dir() -> PathBuf {
+    let current = std::thread::current();
+    // Replace "::" with "_" to make it safe for directory names on Windows systems
+    let test_path = current.name().unwrap().replace("::", "_");
+
+    let testdir = parse("").tempdir().join(test_path);
+
+    // clean up any old test files
+    let _ = fs::remove_dir_all(&testdir);
+    let _ = fs::create_dir_all(&testdir);
+
+    testdir
+}
+
 #[test]
 fn download_ci_llvm() {
     let config = parse("llvm.download-ci-llvm = false");
@@ -539,3 +560,189 @@ fn test_ci_flag() {
     let config = Config::parse_inner(Flags::parse(&["check".into()]), |&_| toml::from_str(""));
     assert_eq!(config.is_running_on_ci, CiEnv::is_ci());
 }
+
+#[test]
+fn test_precedence_of_includes() {
+    let testdir = prepare_test_specific_dir();
+
+    let root_config = testdir.join("config.toml");
+    let root_config_content = br#"
+        include = ["./extension.toml"]
+
+        [llvm]
+        link-jobs = 2
+    "#;
+    File::create(&root_config).unwrap().write_all(root_config_content).unwrap();
+
+    let extension = testdir.join("extension.toml");
+    let extension_content = br#"
+        change-id=543
+        include = ["./extension2.toml"]
+    "#;
+    File::create(extension).unwrap().write_all(extension_content).unwrap();
+
+    let extension = testdir.join("extension2.toml");
+    let extension_content = br#"
+        change-id=742
+
+        [llvm]
+        link-jobs = 10
+
+        [build]
+        description = "Some creative description"
+    "#;
+    File::create(extension).unwrap().write_all(extension_content).unwrap();
+
+    let config = Config::parse_inner(
+        Flags::parse(&["check".to_owned(), format!("--config={}", root_config.to_str().unwrap())]),
+        get_toml,
+    );
+
+    assert_eq!(config.change_id.unwrap(), ChangeId::Id(543));
+    assert_eq!(config.llvm_link_jobs.unwrap(), 2);
+    assert_eq!(config.description.unwrap(), "Some creative description");
+}
+
+#[test]
+#[should_panic(expected = "Cyclic inclusion detected")]
+fn test_cyclic_include_direct() {
+    let testdir = prepare_test_specific_dir();
+
+    let root_config = testdir.join("config.toml");
+    let root_config_content = br#"
+        include = ["./extension.toml"]
+    "#;
+    File::create(&root_config).unwrap().write_all(root_config_content).unwrap();
+
+    let extension = testdir.join("extension.toml");
+    let extension_content = br#"
+        include = ["./config.toml"]
+    "#;
+    File::create(extension).unwrap().write_all(extension_content).unwrap();
+
+    let config = Config::parse_inner(
+        Flags::parse(&["check".to_owned(), format!("--config={}", root_config.to_str().unwrap())]),
+        get_toml,
+    );
+}
+
+#[test]
+#[should_panic(expected = "Cyclic inclusion detected")]
+fn test_cyclic_include_indirect() {
+    let testdir = prepare_test_specific_dir();
+
+    let root_config = testdir.join("config.toml");
+    let root_config_content = br#"
+        include = ["./extension.toml"]
+    "#;
+    File::create(&root_config).unwrap().write_all(root_config_content).unwrap();
+
+    let extension = testdir.join("extension.toml");
+    let extension_content = br#"
+        include = ["./extension2.toml"]
+    "#;
+    File::create(extension).unwrap().write_all(extension_content).unwrap();
+
+    let extension = testdir.join("extension2.toml");
+    let extension_content = br#"
+        include = ["./extension3.toml"]
+    "#;
+    File::create(extension).unwrap().write_all(extension_content).unwrap();
+
+    let extension = testdir.join("extension3.toml");
+    let extension_content = br#"
+        include = ["./extension.toml"]
+    "#;
+    File::create(extension).unwrap().write_all(extension_content).unwrap();
+
+    let config = Config::parse_inner(
+        Flags::parse(&["check".to_owned(), format!("--config={}", root_config.to_str().unwrap())]),
+        get_toml,
+    );
+}
+
+#[test]
+fn test_include_absolute_paths() {
+    let testdir = prepare_test_specific_dir();
+
+    let extension = testdir.join("extension.toml");
+    File::create(&extension).unwrap().write_all(&[]).unwrap();
+
+    let root_config = testdir.join("config.toml");
+    let extension_absolute_path =
+        extension.canonicalize().unwrap().to_str().unwrap().replace('\\', r"\\");
+    let root_config_content = format!(r#"include = ["{}"]"#, extension_absolute_path);
+    File::create(&root_config).unwrap().write_all(root_config_content.as_bytes()).unwrap();
+
+    let config = Config::parse_inner(
+        Flags::parse(&["check".to_owned(), format!("--config={}", root_config.to_str().unwrap())]),
+        get_toml,
+    );
+}
+
+#[test]
+fn test_include_relative_paths() {
+    let testdir = prepare_test_specific_dir();
+
+    let _ = fs::create_dir_all(&testdir.join("subdir/another_subdir"));
+
+    let root_config = testdir.join("config.toml");
+    let root_config_content = br#"
+        include = ["./subdir/extension.toml"]
+    "#;
+    File::create(&root_config).unwrap().write_all(root_config_content).unwrap();
+
+    let extension = testdir.join("subdir/extension.toml");
+    let extension_content = br#"
+        include = ["../extension2.toml"]
+    "#;
+    File::create(extension).unwrap().write_all(extension_content).unwrap();
+
+    let extension = testdir.join("extension2.toml");
+    let extension_content = br#"
+        include = ["./subdir/another_subdir/extension3.toml"]
+    "#;
+    File::create(extension).unwrap().write_all(extension_content).unwrap();
+
+    let extension = testdir.join("subdir/another_subdir/extension3.toml");
+    let extension_content = br#"
+        include = ["../../extension4.toml"]
+    "#;
+    File::create(extension).unwrap().write_all(extension_content).unwrap();
+
+    let extension = testdir.join("extension4.toml");
+    File::create(extension).unwrap().write_all(&[]).unwrap();
+
+    let config = Config::parse_inner(
+        Flags::parse(&["check".to_owned(), format!("--config={}", root_config.to_str().unwrap())]),
+        get_toml,
+    );
+}
+
+#[test]
+fn test_include_precedence_over_profile() {
+    let testdir = prepare_test_specific_dir();
+
+    let root_config = testdir.join("config.toml");
+    let root_config_content = br#"
+        profile = "dist"
+        include = ["./extension.toml"]
+    "#;
+    File::create(&root_config).unwrap().write_all(root_config_content).unwrap();
+
+    let extension = testdir.join("extension.toml");
+    let extension_content = br#"
+        [rust]
+        channel = "dev"
+    "#;
+    File::create(extension).unwrap().write_all(extension_content).unwrap();
+
+    let config = Config::parse_inner(
+        Flags::parse(&["check".to_owned(), format!("--config={}", root_config.to_str().unwrap())]),
+        get_toml,
+    );
+
+    // "dist" profile would normally set the channel to "auto-detect", but includes should
+    // override profile settings, so we expect this to be "dev" here.
+    assert_eq!(config.channel, "dev");
+}
@@ -396,4 +396,9 @@ pub const CONFIG_CHANGE_HISTORY: &[ChangeInfo] = &[
         severity: ChangeSeverity::Info,
         summary: "Added a new option `build.compiletest-use-stage0-libtest` to force `compiletest` to use the stage 0 libtest.",
     },
+    ChangeInfo {
+        change_id: 138934,
+        severity: ChangeSeverity::Info,
+        summary: "Added new option `include` to create config extensions.",
+    },
 ];
@@ -64,12 +64,63 @@ version = "1.0.95"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04"

+[[package]]
+name = "askama"
+version = "0.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5d4744ed2eef2645831b441d8f5459689ade2ab27c854488fbab1fbe94fce1a7"
+dependencies = [
+ "askama_derive",
+ "itoa",
+ "percent-encoding",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "askama_derive"
+version = "0.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d661e0f57be36a5c14c48f78d09011e67e0cb618f269cca9f2fd8d15b68c46ac"
+dependencies = [
+ "askama_parser",
+ "basic-toml",
+ "memchr",
+ "proc-macro2",
+ "quote",
+ "rustc-hash",
+ "serde",
+ "serde_derive",
+ "syn",
+]
+
+[[package]]
+name = "askama_parser"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cf315ce6524c857bb129ff794935cf6d42c82a6cff60526fe2a63593de4d0d4f"
+dependencies = [
+ "memchr",
+ "serde",
+ "serde_derive",
+ "winnow",
+]
+
 [[package]]
 name = "base64"
 version = "0.22.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
+
+[[package]]
+name = "basic-toml"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba62675e8242a4c4e806d12f11d136e626e6c8361d6b829310732241652a178a"
+dependencies = [
+ "serde",
+]

 [[package]]
 name = "build_helper"
 version = "0.1.0"
@@ -104,6 +155,7 @@ name = "citool"
 version = "0.1.0"
 dependencies = [
  "anyhow",
+ "askama",
  "build_helper",
  "clap",
  "csv",
@@ -646,6 +698,12 @@ dependencies = [
  "windows-sys 0.52.0",
 ]

+[[package]]
+name = "rustc-hash"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
+
 [[package]]
 name = "rustls"
 version = "0.23.23"
@@ -1026,6 +1084,15 @@ version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"

+[[package]]
+name = "winnow"
+version = "0.7.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "63d3fcd9bba44b03821e7d699eeee959f3126dcc4aa8e4ae18ec617c2a5cea10"
+dependencies = [
+ "memchr",
+]
+
 [[package]]
 name = "write16"
 version = "1.0.0"
@@ -5,6 +5,7 @@ edition = "2021"

 [dependencies]
 anyhow = "1"
+askama = "0.13"
 clap = { version = "4.5", features = ["derive"] }
 csv = "1"
 diff = "0.1"
@@ -8,9 +8,9 @@ use build_helper::metrics::{
 };

 use crate::github::JobInfoResolver;
-use crate::metrics;
 use crate::metrics::{JobMetrics, JobName, get_test_suites};
 use crate::utils::{output_details, pluralize};
+use crate::{metrics, utils};

 /// Outputs durations of individual bootstrap steps from the gathered bootstrap invocations,
 /// and also a table with summarized information about executed tests.
@@ -394,18 +394,17 @@ fn aggregate_tests(metrics: &JsonRoot) -> TestSuiteData {
             // Poor man's detection of doctests based on the "(line XYZ)" suffix
             let is_doctest = matches!(suite.metadata, TestSuiteMetadata::CargoPackage { .. })
                 && test.name.contains("(line");
-            let test_entry = Test { name: generate_test_name(&test.name), stage, is_doctest };
+            let test_entry = Test {
+                name: utils::normalize_path_delimiters(&test.name).to_string(),
+                stage,
+                is_doctest,
+            };
             tests.insert(test_entry, test.outcome.clone());
         }
     }
     TestSuiteData { tests }
 }

-/// Normalizes Windows-style path delimiters to Unix-style paths.
-fn generate_test_name(name: &str) -> String {
-    name.replace('\\', "/")
-}
-
 /// Prints test changes in Markdown format to stdout.
 fn report_test_diffs(
     diff: AggregatedTestDiffs,
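Design note: the private `generate_test_name` helper is dropped in favour of `utils::normalize_path_delimiters`, which the utils.rs hunk further down adds as a shared `Cow<str>`-returning function; the same normalization is then reused by `normalize_test_name` in the new test_dashboard.rs, so the Windows path-delimiter handling lives in one place.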
@@ -4,6 +4,7 @@ mod datadog;
 mod github;
 mod jobs;
 mod metrics;
+mod test_dashboard;
 mod utils;

 use std::collections::{BTreeMap, HashMap};
@@ -22,7 +23,8 @@ use crate::datadog::upload_datadog_metric;
 use crate::github::JobInfoResolver;
 use crate::jobs::RunType;
 use crate::metrics::{JobMetrics, download_auto_job_metrics, download_job_metrics, load_metrics};
-use crate::utils::load_env_var;
+use crate::test_dashboard::generate_test_dashboard;
+use crate::utils::{load_env_var, output_details};

 const CI_DIRECTORY: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/..");
 const DOCKER_DIRECTORY: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/../docker");
@@ -180,12 +182,26 @@ fn postprocess_metrics(
 }

 fn post_merge_report(db: JobDatabase, current: String, parent: String) -> anyhow::Result<()> {
-    let metrics = download_auto_job_metrics(&db, &parent, &current)?;
+    let metrics = download_auto_job_metrics(&db, Some(&parent), &current)?;

     println!("\nComparing {parent} (parent) -> {current} (this PR)\n");

     let mut job_info_resolver = JobInfoResolver::new();
     output_test_diffs(&metrics, &mut job_info_resolver);
+
+    output_details("Test dashboard", || {
+        println!(
+            r#"Run
+
+```bash
+cargo run --manifest-path src/ci/citool/Cargo.toml -- \
+    test-dashboard {current} --output-dir test-dashboard
+```
+And then open `test-dashboard/index.html` in your browser to see an overview of all executed tests.
+"#
+        );
+    });
+
     output_largest_duration_changes(&metrics, &mut job_info_resolver);

     Ok(())
@@ -234,6 +250,14 @@ enum Args {
         /// Current commit that will be compared to `parent`.
         current: String,
     },
+    /// Generate a directory containing a HTML dashboard of test results from a CI run.
+    TestDashboard {
+        /// Commit SHA that was tested on CI to analyze.
+        current: String,
+        /// Output path for the HTML directory.
+        #[clap(long)]
+        output_dir: PathBuf,
+    },
 }

 #[derive(clap::ValueEnum, Clone)]
@@ -275,7 +299,11 @@ fn main() -> anyhow::Result<()> {
             postprocess_metrics(metrics_path, parent, job_name)?;
         }
         Args::PostMergeReport { current, parent } => {
-            post_merge_report(load_db(default_jobs_file)?, current, parent)?;
+            post_merge_report(load_db(&default_jobs_file)?, current, parent)?;
+        }
+        Args::TestDashboard { current, output_dir } => {
+            let db = load_db(&default_jobs_file)?;
+            generate_test_dashboard(db, &current, &output_dir)?;
         }
     }
@@ -46,14 +46,15 @@ pub struct JobMetrics {
 /// `parent` and `current` should be commit SHAs.
 pub fn download_auto_job_metrics(
     job_db: &JobDatabase,
-    parent: &str,
+    parent: Option<&str>,
     current: &str,
 ) -> anyhow::Result<HashMap<JobName, JobMetrics>> {
     let mut jobs = HashMap::default();

     for job in &job_db.auto_jobs {
         eprintln!("Downloading metrics of job {}", job.name);
-        let metrics_parent = match download_job_metrics(&job.name, parent) {
+        let metrics_parent =
+            parent.and_then(|parent| match download_job_metrics(&job.name, parent) {
                 Ok(metrics) => Some(metrics),
                 Err(error) => {
                     eprintln!(
@@ -63,7 +64,7 @@ Maybe it was newly added?"#,
                     );
                     None
                 }
-            };
+            });
         let metrics_current = download_job_metrics(&job.name, current)?;
         jobs.insert(
             job.name.clone(),
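Making `parent` an `Option<&str>` lets one download routine serve both entry points: `post_merge_report` above still passes `Some(&parent)` to fetch the parent commit's metrics and diff against them, while `generate_test_dashboard` in the new file below passes `None` and only downloads metrics for `current`.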
src/ci/citool/src/test_dashboard.rs (new file)
@@ -0,0 +1,216 @@
use std::collections::{BTreeMap, HashMap};
use std::fs::File;
use std::io::BufWriter;
use std::path::{Path, PathBuf};

use askama::Template;
use build_helper::metrics::{TestOutcome, TestSuiteMetadata};

use crate::jobs::JobDatabase;
use crate::metrics::{JobMetrics, JobName, download_auto_job_metrics, get_test_suites};
use crate::utils::normalize_path_delimiters;

/// Generate a set of HTML files into a directory that contain a dashboard of test results.
pub fn generate_test_dashboard(
    db: JobDatabase,
    current: &str,
    output_dir: &Path,
) -> anyhow::Result<()> {
    let metrics = download_auto_job_metrics(&db, None, current)?;
    let suites = gather_test_suites(&metrics);

    std::fs::create_dir_all(output_dir)?;

    let test_count = suites.test_count();
    write_page(output_dir, "index.html", &TestSuitesPage { suites, test_count })?;

    Ok(())
}

fn write_page<T: Template>(dir: &Path, name: &str, template: &T) -> anyhow::Result<()> {
    let mut file = BufWriter::new(File::create(dir.join(name))?);
    Template::write_into(template, &mut file)?;
    Ok(())
}

fn gather_test_suites(job_metrics: &HashMap<JobName, JobMetrics>) -> TestSuites {
    struct CoarseTestSuite<'a> {
        tests: BTreeMap<String, Test<'a>>,
    }

    let mut suites: HashMap<String, CoarseTestSuite> = HashMap::new();

    // First, gather tests from all jobs, stages and targets, and aggregate them per suite
    // Only work with compiletest suites.
    for (job, metrics) in job_metrics {
        let test_suites = get_test_suites(&metrics.current);
        for suite in test_suites {
            let (suite_name, stage, target) = match &suite.metadata {
                TestSuiteMetadata::CargoPackage { .. } => {
                    continue;
                }
                TestSuiteMetadata::Compiletest { suite, stage, target, .. } => {
                    (suite.clone(), *stage, target)
                }
            };
            let suite_entry = suites
                .entry(suite_name.clone())
                .or_insert_with(|| CoarseTestSuite { tests: Default::default() });
            let test_metadata = TestMetadata { job, stage, target };

            for test in &suite.tests {
                let test_name = normalize_test_name(&test.name, &suite_name);
                let (test_name, variant_name) = match test_name.rsplit_once('#') {
                    Some((name, variant)) => (name.to_string(), variant.to_string()),
                    None => (test_name, "".to_string()),
                };
                let test_entry = suite_entry
                    .tests
                    .entry(test_name.clone())
                    .or_insert_with(|| Test { revisions: Default::default() });
                let variant_entry = test_entry
                    .revisions
                    .entry(variant_name)
                    .or_insert_with(|| TestResults { passed: vec![], ignored: vec![] });

                match test.outcome {
                    TestOutcome::Passed => {
                        variant_entry.passed.push(test_metadata);
                    }
                    TestOutcome::Ignored { ignore_reason: _ } => {
                        variant_entry.ignored.push(test_metadata);
                    }
                    TestOutcome::Failed => {
                        eprintln!("Warning: failed test {test_name}");
                    }
                }
            }
        }
    }

    // Then, split the suites per directory
    let mut suites = suites.into_iter().collect::<Vec<_>>();
    suites.sort_by(|a, b| a.0.cmp(&b.0));

    let suites = suites
        .into_iter()
        .map(|(suite_name, suite)| TestSuite { group: build_test_group(&suite_name, suite.tests) })
        .collect();

    TestSuites { suites }
}

/// Recursively expand a test group based on filesystem hierarchy.
fn build_test_group<'a>(name: &str, tests: BTreeMap<String, Test<'a>>) -> TestGroup<'a> {
    let mut root_tests = vec![];
    let mut subdirs: BTreeMap<String, BTreeMap<String, Test<'a>>> = Default::default();

    // Split tests into root tests and tests located in subdirectories
    for (name, test) in tests {
        let mut components = Path::new(&name).components().peekable();
        let subdir = components.next().unwrap();

        if components.peek().is_none() {
            // This is a root test
            root_tests.push((name, test));
        } else {
            // This is a test in a nested directory
            let subdir_tests =
                subdirs.entry(subdir.as_os_str().to_str().unwrap().to_string()).or_default();
            let test_name =
                components.into_iter().collect::<PathBuf>().to_str().unwrap().to_string();
            subdir_tests.insert(test_name, test);
        }
    }
    let dirs = subdirs
        .into_iter()
        .map(|(name, tests)| {
            let group = build_test_group(&name, tests);
            (name, group)
        })
        .collect();

    TestGroup { name: name.to_string(), root_tests, groups: dirs }
}

/// Compiletest tests start with `[suite] tests/[suite]/a/b/c...`.
/// Remove the `[suite] tests/[suite]/` prefix so that we can find the filesystem path.
/// Also normalizes path delimiters.
fn normalize_test_name(name: &str, suite_name: &str) -> String {
    let name = normalize_path_delimiters(name);
    let name = name.as_ref();
    let name = name.strip_prefix(&format!("[{suite_name}]")).unwrap_or(name).trim();
    let name = name.strip_prefix("tests/").unwrap_or(name);
    let name = name.strip_prefix(suite_name).unwrap_or(name);
    name.trim_start_matches("/").to_string()
}

struct TestSuites<'a> {
    suites: Vec<TestSuite<'a>>,
}

impl<'a> TestSuites<'a> {
    fn test_count(&self) -> u64 {
        self.suites.iter().map(|suite| suite.group.test_count()).sum::<u64>()
    }
}

struct TestSuite<'a> {
    group: TestGroup<'a>,
}

struct TestResults<'a> {
    passed: Vec<TestMetadata<'a>>,
    ignored: Vec<TestMetadata<'a>>,
}

struct Test<'a> {
    revisions: BTreeMap<String, TestResults<'a>>,
}

impl<'a> Test<'a> {
    /// If this is a test without revisions, it will have a single entry in `revisions` with
    /// an empty string as the revision name.
    fn single_test(&self) -> Option<&TestResults<'a>> {
        if self.revisions.len() == 1 {
            self.revisions.iter().next().take_if(|e| e.0.is_empty()).map(|e| e.1)
        } else {
            None
        }
    }
}

#[derive(Clone, Copy)]
#[allow(dead_code)]
struct TestMetadata<'a> {
    job: &'a str,
    stage: u32,
    target: &'a str,
}

// We have to use a template for the TestGroup instead of a macro, because
// macros cannot be recursive in askama at the moment.
#[derive(Template)]
#[template(path = "test_group.askama")]
/// Represents a group of tests
struct TestGroup<'a> {
    name: String,
    /// Tests located directly in this directory
    root_tests: Vec<(String, Test<'a>)>,
    /// Nested directories with additional tests
    groups: Vec<(String, TestGroup<'a>)>,
}

impl<'a> TestGroup<'a> {
    fn test_count(&self) -> u64 {
        let root = self.root_tests.len() as u64;
        self.groups.iter().map(|(_, group)| group.test_count()).sum::<u64>() + root
    }
}

#[derive(Template)]
#[template(path = "test_suites.askama")]
struct TestSuitesPage<'a> {
    suites: TestSuites<'a>,
    test_count: u64,
}
@@ -1,3 +1,4 @@
+use std::borrow::Cow;
 use std::path::Path;

 use anyhow::Context;
@@ -28,3 +29,8 @@ where
     func();
     println!("</details>\n");
 }
+
+/// Normalizes Windows-style path delimiters to Unix-style paths.
+pub fn normalize_path_delimiters(name: &str) -> Cow<str> {
+    if name.contains("\\") { name.replace('\\', "/").into() } else { name.into() }
+}
src/ci/citool/templates/layout.askama (new file)
@@ -0,0 +1,22 @@
<html>
<head>
    <meta charset="UTF-8">
    <title>Rust CI Test Dashboard</title>
    <style>
        body {
            font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica, Arial, sans-serif;
            line-height: 1.6;
            max-width: 1500px;
            margin: 0 auto;
            padding: 20px;
            background: #F5F5F5;
        }
        {% block styles %}{% endblock %}
    </style>
</head>

<body>
{% block content %}{% endblock %}
{% block scripts %}{% endblock %}
</body>
</html>
src/ci/citool/templates/test_group.askama (new file)
@@ -0,0 +1,42 @@
{% macro test_result(r) -%}
passed: {{ r.passed.len() }}, ignored: {{ r.ignored.len() }}
{%- endmacro %}

<li>
    <details>
        <summary>{{ name }} ({{ test_count() }} test{{ test_count() | pluralize }}{% if !root_tests.is_empty() && root_tests.len() as u64 != test_count() -%}
            , {{ root_tests.len() }} root test{{ root_tests.len() | pluralize }}
        {%- endif %}{% if !groups.is_empty() -%}
            , {{ groups.len() }} subdir{{ groups.len() | pluralize }}
        {%- endif %})
        </summary>

        {% if !groups.is_empty() %}
        <ul>
            {% for (dir_name, subgroup) in groups %}
            {{ subgroup|safe }}
            {% endfor %}
        </ul>
        {% endif %}

        {% if !root_tests.is_empty() %}
        <ul>
            {% for (name, test) in root_tests %}
            <li>
                {% if let Some(result) = test.single_test() %}
                <b>{{ name }}</b> ({% call test_result(result) %})
                {% else %}
                <b>{{ name }}</b> ({{ test.revisions.len() }} revision{{ test.revisions.len() | pluralize }})
                <ul>
                    {% for (revision, result) in test.revisions %}
                    <li>#<i>{{ revision }}</i> ({% call test_result(result) %})</li>
                    {% endfor %}
                </ul>
                {% endif %}
            </li>
            {% endfor %}
        </ul>
        {% endif %}

    </details>
</li>
src/ci/citool/templates/test_suites.askama (new file)
@@ -0,0 +1,108 @@
{% extends "layout.askama" %}

{% block content %}
<h1>Rust CI test dashboard</h1>
<div>
    Here's how to interpret the "passed" and "ignored" counts:
    the count includes all combinations of "stage" x "target" x "CI job where the test was executed or ignored".
</div>
<div class="test-suites">
    <div class="summary">
        <div>
            <div class="test-count">Total tests: {{ test_count }}</div>
            <div>
                To find tests that haven't been executed anywhere, click on "Open all" and search for "passed: 0".
            </div>
        </div>
        <div>
            <button onclick="openAll()">Open all</button>
            <button onclick="closeAll()">Close all</button>
        </div>
    </div>

    <ul>
        {% for suite in suites.suites %}
        {{ suite.group|safe }}
        {% endfor %}
    </ul>
</div>
{% endblock %}

{% block styles %}
h1 {
    text-align: center;
    color: #333333;
    margin-bottom: 30px;
}

.summary {
    display: flex;
    justify-content: space-between;
}

.test-count {
    font-size: 1.2em;
}

.test-suites {
    background: white;
    border-radius: 8px;
    box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
    padding: 20px;
}

ul {
    padding-left: 0;
}

li {
    list-style: none;
    padding-left: 20px;
}
summary {
    margin-bottom: 5px;
    padding: 6px;
    background-color: #F4F4F4;
    border: 1px solid #ddd;
    border-radius: 4px;
    cursor: pointer;
}
summary:hover {
    background-color: #CFCFCF;
}

/* Style the disclosure triangles */
details > summary {
    list-style: none;
    position: relative;
}

details > summary::before {
    content: "▶";
    position: absolute;
    left: -15px;
    transform: rotate(0);
    transition: transform 0.2s;
}

details[open] > summary::before {
    transform: rotate(90deg);
}
{% endblock %}

{% block scripts %}
<script type="text/javascript">
    function openAll() {
        const details = document.getElementsByTagName("details");
        for (const elem of details) {
            elem.open = true;
        }
    }
    function closeAll() {
        const details = document.getElementsByTagName("details");
        for (const elem of details) {
            elem.open = false;
        }
    }
</script>
{% endblock %}
Some files were not shown because too many files have changed in this diff.