Auto merge of #76376 - Dylan-DPC:rollup-8chsbw9, r=Dylan-DPC
Rollup of 11 pull requests

Successful merges:

- #75695 (Add a regression test for issue-72793)
- #75741 (Refactor byteorder to std in rustc_middle)
- #75954 (Unstable Book: add links to tracking issues for FFI features)
- #75994 (`impl Rc::new_cyclic`)
- #76060 (Link vec doc to & reference)
- #76078 (Remove disambiguators from intra doc link text)
- #76082 (Fix intra-doc links on pub re-exports)
- #76254 (Fold length constant in Rvalue::Repeat)
- #76258 (x.py check checks tests/examples/benches)
- #76263 (inliner: Check for codegen fn attributes compatibility)
- #76285 (Move jointness censoring to proc_macro)

Failed merges:

r? @ghost
commit 7d289aeade

31 changed files with 714 additions and 86 deletions

@@ -3722,7 +3722,6 @@ name = "rustc_middle"
version = "0.0.0"
dependencies = [
"bitflags",
- "byteorder",
"chalk-ir",
"measureme",
"polonius-engine",

@@ -403,8 +403,8 @@ impl Cursor {
self.index = index;
}

- pub fn look_ahead(&self, n: usize) -> Option<TokenTree> {
- self.stream.0[self.index..].get(n).map(|(tree, _)| tree.clone())
+ pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
+ self.stream.0[self.index..].get(n).map(|(tree, _)| tree)
}
}

@@ -47,15 +47,26 @@ impl ToInternal<token::DelimToken> for Delimiter {
}
}

- impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
- for TokenTree<Group, Punct, Ident, Literal>
+ impl
+ FromInternal<(
+ TreeAndJoint,
+ Option<&'_ tokenstream::TokenTree>,
+ &'_ ParseSess,
+ &'_ mut Vec<Self>,
+ )> for TokenTree<Group, Punct, Ident, Literal>
{
fn from_internal(
- ((tree, is_joint), sess, stack): (TreeAndJoint, &ParseSess, &mut Vec<Self>),
+ ((tree, is_joint), look_ahead, sess, stack): (
+ TreeAndJoint,
+ Option<&tokenstream::TokenTree>,
+ &ParseSess,
+ &mut Vec<Self>,
+ ),
) -> Self {
use rustc_ast::token::*;

- let joint = is_joint == Joint;
+ let joint = is_joint == Joint
+ && matches!(look_ahead, Some(tokenstream::TokenTree::Token(t)) if t.is_op());
let Token { kind, span } = match tree {
tokenstream::TokenTree::Delimited(span, delim, tts) => {
let delimiter = Delimiter::from_internal(delim);

@@ -445,7 +456,8 @@ impl server::TokenStreamIter for Rustc<'_> {
loop {
let tree = iter.stack.pop().or_else(|| {
let next = iter.cursor.next_with_joint()?;
- Some(TokenTree::from_internal((next, self.sess, &mut iter.stack)))
+ let lookahead = iter.cursor.look_ahead(0);
+ Some(TokenTree::from_internal((next, lookahead, self.sess, &mut iter.stack)))
})?;
// A hack used to pass AST fragments to attribute and derive macros
// as a single nonterminal token instead of a token stream.

@@ -26,7 +26,6 @@ rustc_index = { path = "../rustc_index" }
rustc_serialize = { path = "../rustc_serialize" }
rustc_ast = { path = "../rustc_ast" }
rustc_span = { path = "../rustc_span" }
- byteorder = { version = "1.3" }
chalk-ir = "0.21.0"
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
measureme = "0.7.1"

@@ -345,10 +345,8 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {

/// Reads a *non-ZST* scalar.
///
- /// ZSTs can't be read for two reasons:
- /// * byte-order cannot work with zero-element buffers;
- /// * in order to obtain a `Pointer`, we need to check for ZSTness anyway due to integer
- /// pointers being valid for ZSTs.
+ /// ZSTs can't be read because in order to obtain a `Pointer`, we need to check
+ /// for ZSTness anyway due to integer pointers being valid for ZSTs.
///
/// It is the caller's responsibility to check bounds and alignment beforehand.
/// Most likely, you want to call `InterpCx::read_scalar` instead of this method.

@@ -397,10 +395,8 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {

/// Writes a *non-ZST* scalar.
///
- /// ZSTs can't be read for two reasons:
- /// * byte-order cannot work with zero-element buffers;
- /// * in order to obtain a `Pointer`, we need to check for ZSTness anyway due to integer
- /// pointers being valid for ZSTs.
+ /// ZSTs can't be read because in order to obtain a `Pointer`, we need to check
+ /// for ZSTness anyway due to integer pointers being valid for ZSTs.
///
/// It is the caller's responsibility to check bounds and alignment beforehand.
/// Most likely, you want to call `InterpCx::write_scalar` instead of this method.

@@ -98,10 +98,10 @@ mod value;
use std::convert::TryFrom;
use std::fmt;
use std::io;
+ use std::io::{Read, Write};
use std::num::NonZeroU32;
use std::sync::atomic::{AtomicU32, Ordering};

- use byteorder::{BigEndian, LittleEndian, ReadBytesExt, WriteBytesExt};
use rustc_ast::LitKind;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::{HashMapExt, Lock};

@@ -561,19 +561,33 @@ pub fn write_target_uint(
mut target: &mut [u8],
data: u128,
) -> Result<(), io::Error> {
- let len = target.len();
+ // This u128 holds an "any-size uint" (since smaller uints can fits in it)
+ // So we do not write all bytes of the u128, just the "payload".
match endianness {
- Endian::Little => target.write_uint128::<LittleEndian>(data, len),
- Endian::Big => target.write_uint128::<BigEndian>(data, len),
- }
+ Endian::Little => target.write(&data.to_le_bytes())?,
+ Endian::Big => target.write(&data.to_be_bytes()[16 - target.len()..])?,
+ };
+ debug_assert!(target.len() == 0); // We should have filled the target buffer.
+ Ok(())
}

#[inline]
pub fn read_target_uint(endianness: Endian, mut source: &[u8]) -> Result<u128, io::Error> {
- match endianness {
- Endian::Little => source.read_uint128::<LittleEndian>(source.len()),
- Endian::Big => source.read_uint128::<BigEndian>(source.len()),
- }
+ // This u128 holds an "any-size uint" (since smaller uints can fits in it)
+ let mut buf = [0u8; std::mem::size_of::<u128>()];
+ // So we do not read exactly 16 bytes into the u128, just the "payload".
+ let uint = match endianness {
+ Endian::Little => {
+ source.read(&mut buf)?;
+ Ok(u128::from_le_bytes(buf))
+ }
+ Endian::Big => {
+ source.read(&mut buf[16 - source.len()..])?;
+ Ok(u128::from_be_bytes(buf))
+ }
+ };
+ debug_assert!(source.len() == 0); // We should have consumed the source buffer.
+ uint
}

////////////////////////////////////////////////////////////////////////////////

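For readers following the byteorder removal above, here is a minimal, self-contained sketch of the same std-only technique (not part of the patch; `demo_roundtrip` is an illustrative name): write only the low `size` bytes of a `u128`'s little-endian form, then read them back into a zeroed 16-byte buffer.

// Minimal sketch, not from the patch: round-trip a "small" uint stored in a u128
// using only std, mirroring write_target_uint/read_target_uint above.
fn demo_roundtrip(value: u128, size: usize) -> u128 {
    // Write: keep only the low `size` bytes of the little-endian representation.
    let payload = &value.to_le_bytes()[..size];

    // Read: copy the payload back into a zeroed 16-byte buffer and reinterpret.
    let mut buf = [0u8; 16];
    buf[..size].copy_from_slice(payload);
    u128::from_le_bytes(buf)
}

fn main() {
    // Holds as long as `value` actually fits in `size` bytes.
    assert_eq!(demo_roundtrip(0xABCD, 2), 0xABCD);
}
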
@@ -175,7 +175,7 @@ impl<'tcx> TypeFoldable<'tcx> for Rvalue<'tcx> {
use crate::mir::Rvalue::*;
match *self {
Use(ref op) => Use(op.fold_with(folder)),
- Repeat(ref op, len) => Repeat(op.fold_with(folder), len),
+ Repeat(ref op, len) => Repeat(op.fold_with(folder), len.fold_with(folder)),
ThreadLocalRef(did) => ThreadLocalRef(did.fold_with(folder)),
Ref(region, bk, ref place) => {
Ref(region.fold_with(folder), bk, place.fold_with(folder))

@@ -4,7 +4,7 @@ use rustc_attr as attr;
use rustc_hir::def_id::DefId;
use rustc_index::bit_set::BitSet;
use rustc_index::vec::{Idx, IndexVec};
- use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
+ use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
use rustc_middle::mir::visit::*;
use rustc_middle::mir::*;
use rustc_middle::ty::subst::{Subst, SubstsRef};

@@ -45,7 +45,8 @@ impl<'tcx> MirPass<'tcx> for Inline {
// based function.
debug!("function inlining is disabled when compiling with `instrument_coverage`");
} else {
- Inliner { tcx, source }.run_pass(body);
+ Inliner { tcx, source, codegen_fn_attrs: tcx.codegen_fn_attrs(source.def_id()) }
+ .run_pass(body);
}
}
}

@@ -54,6 +55,7 @@ impl<'tcx> MirPass<'tcx> for Inline {
struct Inliner<'tcx> {
tcx: TyCtxt<'tcx>,
source: MirSource<'tcx>,
+ codegen_fn_attrs: &'tcx CodegenFnAttrs,
}

impl Inliner<'tcx> {

@@ -242,9 +244,19 @@ impl Inliner<'tcx> {
return false;
}

- // Avoid inlining functions marked as no_sanitize if sanitizer is enabled,
- // since instrumentation might be enabled and performed on the caller.
- if self.tcx.sess.opts.debugging_opts.sanitizer.intersects(codegen_fn_attrs.no_sanitize) {
+ let self_features = &self.codegen_fn_attrs.target_features;
+ let callee_features = &codegen_fn_attrs.target_features;
+ if callee_features.iter().any(|feature| !self_features.contains(feature)) {
+ debug!("`callee has extra target features - not inlining");
+ return false;
+ }
+
+ let self_no_sanitize =
+ self.codegen_fn_attrs.no_sanitize & self.tcx.sess.opts.debugging_opts.sanitizer;
+ let callee_no_sanitize =
+ codegen_fn_attrs.no_sanitize & self.tcx.sess.opts.debugging_opts.sanitizer;
+ if self_no_sanitize != callee_no_sanitize {
+ debug!("`callee has incompatible no_sanitize attribute - not inlining");
return false;
}

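To restate the new inlining rule in isolation (an illustrative sketch only; the names and the `u32` bitflag stand-in are not the compiler's actual types): a callee is rejected if it needs target features the caller lacks, or if, restricted to the sanitizers enabled for the session, caller and callee do not opt out of sanitization identically.

use std::collections::HashSet;

// Illustrative stand-ins: target features as string sets, no_sanitize as bitflags.
fn may_inline(
    caller_features: &HashSet<&str>,
    callee_features: &HashSet<&str>,
    caller_no_sanitize: u32,
    callee_no_sanitize: u32,
    enabled_sanitizers: u32,
) -> bool {
    // The callee must not require target features the caller does not have.
    if !callee_features.is_subset(caller_features) {
        return false;
    }
    // Restricted to enabled sanitizers, both sides must have the same no_sanitize set.
    (caller_no_sanitize & enabled_sanitizers) == (callee_no_sanitize & enabled_sanitizers)
}

fn main() {
    let caller: HashSet<&str> = vec!["sse2"].into_iter().collect();
    let callee: HashSet<&str> = vec!["sse2", "avx"].into_iter().collect();
    // Callee has an extra target feature, so it must not be inlined.
    assert!(!may_inline(&caller, &callee, 0, 0, 0b1));
}
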
@@ -262,10 +262,7 @@ impl<'a> TokenTreesReader<'a> {
}
_ => {
let tt = TokenTree::Token(self.token.take());
- let mut is_joint = self.bump();
- if !self.token.is_op() {
- is_joint = NonJoint;
- }
+ let is_joint = self.bump();
Ok((tt, is_joint))
}
}

@@ -822,15 +822,15 @@ impl<'a> Parser<'a> {
}

let frame = &self.token_cursor.frame;
- looker(&match frame.tree_cursor.look_ahead(dist - 1) {
+ match frame.tree_cursor.look_ahead(dist - 1) {
Some(tree) => match tree {
- TokenTree::Token(token) => token,
+ TokenTree::Token(token) => looker(token),
TokenTree::Delimited(dspan, delim, _) => {
- Token::new(token::OpenDelim(delim), dspan.open)
+ looker(&Token::new(token::OpenDelim(delim.clone()), dspan.open))
}
},
- None => Token::new(token::CloseDelim(frame.delim), frame.span.close),
- })
+ None => looker(&Token::new(token::CloseDelim(frame.delim), frame.span.close)),
+ }
}

/// Returns whether any of the given keywords are `dist` tokens ahead of the current one.

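The reshuffle above exists because two of the arms build a temporary `Token`, so a reference to it cannot be returned out of the `match`; instead the `looker` closure is applied inside each arm. A generic illustration of that borrow pattern (hypothetical function, not from the patch):

// Hypothetical example of the same pattern: the closure is called inside each
// arm because `c` is a temporary that cannot outlive the match.
fn with_first_char<R>(s: &str, f: impl FnOnce(&char) -> R) -> R {
    match s.chars().next() {
        Some(c) => f(&c),
        None => f(&'\0'),
    }
}

fn main() {
    assert_eq!(with_first_char("abc", |c| c.is_alphabetic()), true);
}
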
@@ -325,6 +325,50 @@ impl<T> Rc<T> {
)
}

+ /// Constructs a new `Rc<T>` using a weak reference to itself. Attempting
+ /// to upgrade the weak reference before this function returns will result
+ /// in a `None` value. However, the weak reference may be cloned freely and
+ /// stored for use at a later time.
+ #[unstable(feature = "arc_new_cyclic", issue = "75861")]
+ pub fn new_cyclic(data_fn: impl FnOnce(&Weak<T>) -> T) -> Rc<T> {
+ // Construct the inner in the "uninitialized" state with a single
+ // weak reference.
+ let uninit_ptr: NonNull<_> = Box::leak(box RcBox {
+ strong: Cell::new(0),
+ weak: Cell::new(1),
+ value: mem::MaybeUninit::<T>::uninit(),
+ })
+ .into();
+
+ let init_ptr: NonNull<RcBox<T>> = uninit_ptr.cast();
+
+ let weak = Weak { ptr: init_ptr };
+
+ // It's important we don't give up ownership of the weak pointer, or
+ // else the memory might be freed by the time `data_fn` returns. If
+ // we really wanted to pass ownership, we could create an additional
+ // weak pointer for ourselves, but this would result in additional
+ // updates to the weak reference count which might not be necessary
+ // otherwise.
+ let data = data_fn(&weak);
+
+ unsafe {
+ let inner = init_ptr.as_ptr();
+ ptr::write(&raw mut (*inner).value, data);
+
+ let prev_value = (*inner).strong.get();
+ debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
+ (*inner).strong.set(1);
+ }
+
+ let strong = Rc::from_inner(init_ptr);
+
+ // Strong references should collectively own a shared weak reference,
+ // so don't run the destructor for our old weak reference.
+ mem::forget(weak);
+ strong
+ }
+
/// Constructs a new `Rc` with uninitialized contents.
///
/// # Examples

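As a usage note for the API added above (a hedged sketch; the `Node` type here is illustrative, and at the time of this merge the call is gated behind the `arc_new_cyclic` feature): `Rc::new_cyclic` hands the closure a `Weak` that only becomes upgradeable once the `Rc` is fully constructed.

#![feature(arc_new_cyclic)]

use std::rc::{Rc, Weak};

// Illustrative type: a node that keeps a weak back-reference to its own Rc.
struct Node {
    me: Weak<Node>,
}

fn main() {
    let node = Rc::new_cyclic(|weak| {
        // Inside the closure the allocation is not yet initialized, so upgrading fails.
        assert!(weak.upgrade().is_none());
        Node { me: weak.clone() }
    });
    // After construction the stored weak reference points back at `node` itself.
    assert!(Rc::ptr_eq(&node, &node.me.upgrade().unwrap()));
}
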
@@ -434,3 +434,69 @@ fn test_array_from_slice() {
let a: Result<Rc<[u32; 2]>, _> = r.clone().try_into();
assert!(a.is_err());
}
+
+ #[test]
+ fn test_rc_cyclic_with_zero_refs() {
+ struct ZeroRefs {
+ inner: Weak<ZeroRefs>,
+ }
+
+ let zero_refs = Rc::new_cyclic(|inner| {
+ assert_eq!(inner.strong_count(), 0);
+ assert!(inner.upgrade().is_none());
+ ZeroRefs { inner: Weak::new() }
+ });
+
+ assert_eq!(Rc::strong_count(&zero_refs), 1);
+ assert_eq!(Rc::weak_count(&zero_refs), 0);
+ assert_eq!(zero_refs.inner.strong_count(), 0);
+ assert_eq!(zero_refs.inner.weak_count(), 0);
+ }
+
+ #[test]
+ fn test_rc_cyclic_with_one_ref() {
+ struct OneRef {
+ inner: Weak<OneRef>,
+ }
+
+ let one_ref = Rc::new_cyclic(|inner| {
+ assert_eq!(inner.strong_count(), 0);
+ assert!(inner.upgrade().is_none());
+ OneRef { inner: inner.clone() }
+ });
+
+ assert_eq!(Rc::strong_count(&one_ref), 1);
+ assert_eq!(Rc::weak_count(&one_ref), 1);
+
+ let one_ref2 = Weak::upgrade(&one_ref.inner).unwrap();
+ assert!(Rc::ptr_eq(&one_ref, &one_ref2));
+
+ assert_eq!(one_ref.inner.strong_count(), 2);
+ assert_eq!(one_ref.inner.weak_count(), 1);
+ }
+
+ #[test]
+ fn test_rc_cyclic_with_two_ref() {
+ struct TwoRefs {
+ inner: Weak<TwoRefs>,
+ inner1: Weak<TwoRefs>,
+ }
+
+ let two_refs = Rc::new_cyclic(|inner| {
+ assert_eq!(inner.strong_count(), 0);
+ assert!(inner.upgrade().is_none());
+ TwoRefs { inner: inner.clone(), inner1: inner.clone() }
+ });
+
+ assert_eq!(Rc::strong_count(&two_refs), 1);
+ assert_eq!(Rc::weak_count(&two_refs), 2);
+
+ let two_ref3 = Weak::upgrade(&two_refs.inner).unwrap();
+ assert!(Rc::ptr_eq(&two_refs, &two_ref3));
+
+ let two_ref2 = Weak::upgrade(&two_refs.inner1).unwrap();
+ assert!(Rc::ptr_eq(&two_refs, &two_ref2));
+
+ assert_eq!(Rc::strong_count(&two_refs), 3);
+ assert_eq!(Rc::weak_count(&two_refs), 2);
+ }

@@ -159,7 +159,7 @@ use crate::raw_vec::RawVec;
/// # Slicing
///
/// A `Vec` can be mutable. Slices, on the other hand, are read-only objects.
- /// To get a slice, use `&`. Example:
+ /// To get a [slice], use [`&`]. Example:
///
/// ```
/// fn read_slice(slice: &[usize]) {

@@ -287,6 +287,8 @@ use crate::raw_vec::RawVec;
/// [`insert`]: Vec::insert
/// [`reserve`]: Vec::reserve
/// [owned slice]: Box
+ /// [slice]: ../../std/primitive.slice.html
+ /// [`&`]: ../../std/primitive.reference.html
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "vec_type")]
pub struct Vec<T> {

@@ -382,7 +382,7 @@ impl<'a> Builder<'a> {
native::Lld
),
Kind::Check | Kind::Clippy | Kind::Fix | Kind::Format => {
- describe!(check::Std, check::Rustc, check::Rustdoc, check::Clippy)
+ describe!(check::Std, check::Rustc, check::Rustdoc, check::Clippy, check::Bootstrap)
}
Kind::Test => describe!(
crate::toolstate::ToolStateCheck,

@@ -66,6 +66,43 @@ impl Step for Std {
let libdir = builder.sysroot_libdir(compiler, target);
let hostdir = builder.sysroot_libdir(compiler, compiler.host);
add_to_sysroot(&builder, &libdir, &hostdir, &libstd_stamp(builder, compiler, target));
+
+ // Then run cargo again, once we've put the rmeta files for the library
+ // crates into the sysroot. This is needed because e.g., core's tests
+ // depend on `libtest` -- Cargo presumes it will exist, but it doesn't
+ // since we initialize with an empty sysroot.
+ //
+ // Currently only the "libtest" tree of crates does this.
+
+ let mut cargo = builder.cargo(
+ compiler,
+ Mode::Std,
+ SourceType::InTree,
+ target,
+ cargo_subcommand(builder.kind),
+ );
+ std_cargo(builder, target, compiler.stage, &mut cargo);
+ cargo.arg("--all-targets");
+
+ // Explicitly pass -p for all dependencies krates -- this will force cargo
+ // to also check the tests/benches/examples for these crates, rather
+ // than just the leaf crate.
+ for krate in builder.in_tree_crates("test") {
+ cargo.arg("-p").arg(krate.name);
+ }
+
+ builder.info(&format!(
+ "Checking std test/bench/example targets ({} -> {})",
+ &compiler.host, target
+ ));
+ run_cargo(
+ builder,
+ cargo,
+ args(builder.kind),
+ &libstd_test_stamp(builder, compiler, target),
+ vec![],
+ true,
+ );
}
}

@@ -106,6 +143,14 @@ impl Step for Rustc {
cargo_subcommand(builder.kind),
);
rustc_cargo(builder, &mut cargo, target);
+ cargo.arg("--all-targets");
+
+ // Explicitly pass -p for all compiler krates -- this will force cargo
+ // to also check the tests/benches/examples for these crates, rather
+ // than just the leaf crate.
+ for krate in builder.in_tree_crates("rustc-main") {
+ cargo.arg("-p").arg(krate.name);
+ }

builder.info(&format!("Checking compiler artifacts ({} -> {})", &compiler.host, target));
run_cargo(

@@ -149,7 +194,7 @@ macro_rules! tool_check_step {

builder.ensure(Rustc { target });

- let cargo = prepare_tool_cargo(
+ let mut cargo = prepare_tool_cargo(
builder,
compiler,
Mode::ToolRustc,

@@ -160,12 +205,14 @@ macro_rules! tool_check_step {
&[],
);

- println!(
+ cargo.arg("--all-targets");
+
+ builder.info(&format!(
"Checking {} artifacts ({} -> {})",
stringify!($name).to_lowercase(),
&compiler.host.triple,
target.triple
- );
+ ));
run_cargo(
builder,
cargo,

@@ -202,12 +249,24 @@ tool_check_step!(Rustdoc, "src/tools/rustdoc", SourceType::InTree);
// rejected.
tool_check_step!(Clippy, "src/tools/clippy", SourceType::InTree);

+ tool_check_step!(Bootstrap, "src/bootstrap", SourceType::InTree);
+
/// Cargo's output path for the standard library in a given stage, compiled
/// by a particular compiler for the specified target.
fn libstd_stamp(builder: &Builder<'_>, compiler: Compiler, target: TargetSelection) -> PathBuf {
builder.cargo_out(compiler, Mode::Std, target).join(".libstd-check.stamp")
}

+ /// Cargo's output path for the standard library in a given stage, compiled
+ /// by a particular compiler for the specified target.
+ fn libstd_test_stamp(
+ builder: &Builder<'_>,
+ compiler: Compiler,
+ target: TargetSelection,
+ ) -> PathBuf {
+ builder.cargo_out(compiler, Mode::Std, target).join(".libstd-check-test.stamp")
+ }
+
/// Cargo's output path for librustc in a given stage, compiled by a particular
/// compiler for the specified target.
fn librustc_stamp(builder: &Builder<'_>, compiler: Compiler, target: TargetSelection) -> PathBuf {

@@ -1,5 +1,9 @@
# `ffi_const`

+ The tracking issue for this feature is: [#58328]
+
+ ------
+
The `#[ffi_const]` attribute applies clang's `const` attribute to foreign
functions declarations.

@@ -42,6 +46,7 @@ implemented in this way on all of them. It is therefore also worth verifying
that the semantics of the C toolchain used to compile the binary being linked
against are compatible with those of the `#[ffi_const]`.

+ [#58328]: https://github.com/rust-lang/rust/issues/58328
[ARM C/C++ compiler]: http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0491c/Cacgigch.html
[GCC]: https://gcc.gnu.org/onlinedocs/gcc/Common-Function-Attributes.html#index-const-function-attribute
[IBM ILE C/C++]: https://www.ibm.com/support/knowledgecenter/fr/ssw_ibm_i_71/rzarg/fn_attrib_const.htm

@@ -1,5 +1,9 @@
# `ffi_pure`

+ The tracking issue for this feature is: [#58329]
+
+ ------
+
The `#[ffi_pure]` attribute applies clang's `pure` attribute to foreign
functions declarations.

@@ -46,6 +50,7 @@ that the semantics of the C toolchain used to compile the binary being linked
against are compatible with those of the `#[ffi_pure]`.


+ [#58329]: https://github.com/rust-lang/rust/issues/58329
[ARM C/C++ compiler]: http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0491c/Cacigdac.html
[GCC]: https://gcc.gnu.org/onlinedocs/gcc/Common-Function-Attributes.html#index-pure-function-attribute
[IBM ILE C/C++]: https://www.ibm.com/support/knowledgecenter/fr/ssw_ibm_i_71/rzarg/fn_attrib_pure.htm

@@ -118,7 +118,7 @@ impl Item {
self.attrs.collapsed_doc_value()
}

- pub fn links(&self) -> Vec<(String, String)> {
+ pub fn links(&self) -> Vec<RenderedLink> {
self.attrs.links(&self.def_id.krate)
}

@@ -425,10 +425,38 @@ pub struct Attributes {
pub cfg: Option<Arc<Cfg>>,
pub span: Option<rustc_span::Span>,
/// map from Rust paths to resolved defs and potential URL fragments
- pub links: Vec<(String, Option<DefId>, Option<String>)>,
+ pub links: Vec<ItemLink>,
pub inner_docs: bool,
}

+ #[derive(Clone, Debug, Default, PartialEq, Eq, Hash)]
+ /// A link that has not yet been rendered.
+ ///
+ /// This link will be turned into a rendered link by [`Attributes::links`]
+ pub struct ItemLink {
+ /// The original link written in the markdown
+ pub(crate) link: String,
+ /// The link text displayed in the HTML.
+ ///
+ /// This may not be the same as `link` if there was a disambiguator
+ /// in an intra-doc link (e.g. \[`fn@f`\])
+ pub(crate) link_text: String,
+ pub(crate) did: Option<DefId>,
+ /// The url fragment to append to the link
+ pub(crate) fragment: Option<String>,
+ }
+
+ pub struct RenderedLink {
+ /// The text the link was original written as.
+ ///
+ /// This could potentially include disambiguators and backticks.
+ pub(crate) original_text: String,
+ /// The text to display in the HTML
+ pub(crate) new_text: String,
+ /// The URL to put in the `href`
+ pub(crate) href: String,
+ }
+
impl Attributes {
/// Extracts the content from an attribute `#[doc(cfg(content))]`.
pub fn extract_cfg(mi: &ast::MetaItem) -> Option<&ast::MetaItem> {

@@ -605,21 +633,25 @@ impl Attributes {
/// Gets links as a vector
///
/// Cache must be populated before call
- pub fn links(&self, krate: &CrateNum) -> Vec<(String, String)> {
+ pub fn links(&self, krate: &CrateNum) -> Vec<RenderedLink> {
use crate::html::format::href;
use crate::html::render::CURRENT_DEPTH;

self.links
.iter()
- .filter_map(|&(ref s, did, ref fragment)| {
- match did {
+ .filter_map(|ItemLink { link: s, link_text, did, fragment }| {
+ match *did {
Some(did) => {
if let Some((mut href, ..)) = href(did) {
if let Some(ref fragment) = *fragment {
href.push_str("#");
href.push_str(fragment);
}
- Some((s.clone(), href))
+ Some(RenderedLink {
+ original_text: s.clone(),
+ new_text: link_text.clone(),
+ href,
+ })
} else {
None
}

@@ -639,16 +671,17 @@ impl Attributes {
};
// This is a primitive so the url is done "by hand".
let tail = fragment.find('#').unwrap_or_else(|| fragment.len());
- Some((
- s.clone(),
- format!(
+ Some(RenderedLink {
+ original_text: s.clone(),
+ new_text: link_text.clone(),
+ href: format!(
"{}{}std/primitive.{}.html{}",
url,
if !url.ends_with('/') { "/" } else { "" },
&fragment[..tail],
&fragment[tail..]
),
- ))
+ })
} else {
panic!("This isn't a primitive?!");
}

@@ -34,6 +34,7 @@ use std::fmt::Write;
use std::ops::Range;
use std::str;

+ use crate::clean::RenderedLink;
use crate::doctest;
use crate::html::highlight;
use crate::html::toc::TocBuilder;

@@ -52,7 +53,7 @@ fn opts() -> Options {
pub struct Markdown<'a>(
pub &'a str,
/// A list of link replacements.
- pub &'a [(String, String)],
+ pub &'a [RenderedLink],
/// The current list of used header IDs.
pub &'a mut IdMap,
/// Whether to allow the use of explicit error codes in doctest lang strings.

@@ -78,7 +79,7 @@ pub struct MarkdownHtml<'a>(
pub &'a Option<Playground>,
);
/// A tuple struct like `Markdown` that renders only the first paragraph.
- pub struct MarkdownSummaryLine<'a>(pub &'a str, pub &'a [(String, String)]);
+ pub struct MarkdownSummaryLine<'a>(pub &'a str, pub &'a [RenderedLink]);

#[derive(Copy, Clone, PartialEq, Debug)]
pub enum ErrorCodes {

@@ -337,31 +338,107 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for CodeBlocks<'_, 'a, I> {
}

/// Make headings links with anchor IDs and build up TOC.
- struct LinkReplacer<'a, 'b, I: Iterator<Item = Event<'a>>> {
+ struct LinkReplacer<'a, I: Iterator<Item = Event<'a>>> {
inner: I,
- links: &'b [(String, String)],
+ links: &'a [RenderedLink],
+ shortcut_link: Option<&'a RenderedLink>,
}

- impl<'a, 'b, I: Iterator<Item = Event<'a>>> LinkReplacer<'a, 'b, I> {
- fn new(iter: I, links: &'b [(String, String)]) -> Self {
- LinkReplacer { inner: iter, links }
+ impl<'a, I: Iterator<Item = Event<'a>>> LinkReplacer<'a, I> {
+ fn new(iter: I, links: &'a [RenderedLink]) -> Self {
+ LinkReplacer { inner: iter, links, shortcut_link: None }
}
}

- impl<'a, 'b, I: Iterator<Item = Event<'a>>> Iterator for LinkReplacer<'a, 'b, I> {
+ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for LinkReplacer<'a, I> {
type Item = Event<'a>;

fn next(&mut self) -> Option<Self::Item> {
- let event = self.inner.next();
- if let Some(Event::Start(Tag::Link(kind, dest, text))) = event {
- if let Some(&(_, ref replace)) = self.links.iter().find(|link| link.0 == *dest) {
- Some(Event::Start(Tag::Link(kind, replace.to_owned().into(), text)))
- } else {
- Some(Event::Start(Tag::Link(kind, dest, text)))
- }
- } else {
- event
- }
+ use pulldown_cmark::LinkType;
+
+ let mut event = self.inner.next();
+
+ // Replace intra-doc links and remove disambiguators from shortcut links (`[fn@f]`).
+ match &mut event {
+ // This is a shortcut link that was resolved by the broken_link_callback: `[fn@f]`
+ // Remove any disambiguator.
+ Some(Event::Start(Tag::Link(
+ // [fn@f] or [fn@f][]
+ LinkType::ShortcutUnknown | LinkType::CollapsedUnknown,
+ dest,
+ title,
+ ))) => {
+ debug!("saw start of shortcut link to {} with title {}", dest, title);
+ // If this is a shortcut link, it was resolved by the broken_link_callback.
+ // So the URL will already be updated properly.
+ let link = self.links.iter().find(|&link| *link.href == **dest);
+ // Since this is an external iterator, we can't replace the inner text just yet.
+ // Store that we saw a link so we know to replace it later.
+ if let Some(link) = link {
+ trace!("it matched");
+ assert!(self.shortcut_link.is_none(), "shortcut links cannot be nested");
+ self.shortcut_link = Some(link);
+ }
+ }
+ // Now that we're done with the shortcut link, don't replace any more text.
+ Some(Event::End(Tag::Link(
+ LinkType::ShortcutUnknown | LinkType::CollapsedUnknown,
+ dest,
+ _,
+ ))) => {
+ debug!("saw end of shortcut link to {}", dest);
+ if self.links.iter().find(|&link| *link.href == **dest).is_some() {
+ assert!(self.shortcut_link.is_some(), "saw closing link without opening tag");
+ self.shortcut_link = None;
+ }
+ }
+ // Handle backticks in inline code blocks, but only if we're in the middle of a shortcut link.
+ // [`fn@f`]
+ Some(Event::Code(text)) => {
+ trace!("saw code {}", text);
+ if let Some(link) = self.shortcut_link {
+ trace!("original text was {}", link.original_text);
+ // NOTE: this only replaces if the code block is the *entire* text.
+ // If only part of the link has code highlighting, the disambiguator will not be removed.
+ // e.g. [fn@`f`]
+ // This is a limitation from `collect_intra_doc_links`: it passes a full link,
+ // and does not distinguish at all between code blocks.
+ // So we could never be sure we weren't replacing too much:
+ // [fn@my_`f`unc] is treated the same as [my_func()] in that pass.
+ //
+ // NOTE: &[1..len() - 1] is to strip the backticks
+ if **text == link.original_text[1..link.original_text.len() - 1] {
+ debug!("replacing {} with {}", text, link.new_text);
+ *text = CowStr::Borrowed(&link.new_text);
+ }
+ }
+ }
+ // Replace plain text in links, but only in the middle of a shortcut link.
+ // [fn@f]
+ Some(Event::Text(text)) => {
+ trace!("saw text {}", text);
+ if let Some(link) = self.shortcut_link {
+ trace!("original text was {}", link.original_text);
+ // NOTE: same limitations as `Event::Code`
+ if **text == *link.original_text {
+ debug!("replacing {} with {}", text, link.new_text);
+ *text = CowStr::Borrowed(&link.new_text);
+ }
+ }
+ }
+ // If this is a link, but not a shortcut link,
+ // replace the URL, since the broken_link_callback was not called.
+ Some(Event::Start(Tag::Link(_, dest, _))) => {
+ if let Some(link) = self.links.iter().find(|&link| *link.original_text == **dest) {
+ *dest = CowStr::Borrowed(link.href.as_ref());
+ }
+ }
+ // Anything else couldn't have been a valid Rust path, so no need to replace the text.
+ _ => {}
+ }
+
+ // Yield the modified event
+ event
}
}

@@ -855,8 +932,8 @@ impl Markdown<'_> {
return String::new();
}
let replacer = |_: &str, s: &str| {
- if let Some(&(_, ref replace)) = links.iter().find(|link| &*link.0 == s) {
- Some((replace.clone(), s.to_owned()))
+ if let Some(link) = links.iter().find(|link| &*link.original_text == s) {
+ Some((link.href.clone(), link.new_text.clone()))
} else {
None
}

@@ -933,8 +1010,8 @@ impl MarkdownSummaryLine<'_> {
}

let replacer = |_: &str, s: &str| {
- if let Some(&(_, ref replace)) = links.iter().find(|link| &*link.0 == s) {
- Some((replace.clone(), s.to_owned()))
+ if let Some(link) = links.iter().find(|link| &*link.original_text == s) {
+ Some((link.href.clone(), link.new_text.clone()))
} else {
None
}

@@ -63,9 +63,8 @@ use rustc_span::symbol::{sym, Symbol};
use serde::ser::SerializeSeq;
use serde::{Serialize, Serializer};

- use crate::clean::{self, AttributesExt, Deprecation, GetDefId, SelfTy, TypeKind};
- use crate::config::RenderInfo;
- use crate::config::RenderOptions;
+ use crate::clean::{self, AttributesExt, Deprecation, GetDefId, RenderedLink, SelfTy, TypeKind};
+ use crate::config::{RenderInfo, RenderOptions};
use crate::docfs::{DocFS, PathError};
use crate::doctree;
use crate::error::Error;

@@ -1774,7 +1773,7 @@ fn render_markdown(
w: &mut Buffer,
cx: &Context,
md_text: &str,
- links: Vec<(String, String)>,
+ links: Vec<RenderedLink>,
prefix: &str,
is_hidden: bool,
) {

@@ -582,6 +582,9 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
let parent_node = if item.is_fake() {
// FIXME: is this correct?
None
+ // If we're documenting the crate root itself, it has no parent. Use the root instead.
+ } else if item.def_id.is_top_level_module() {
+ Some(item.def_id)
} else {
let mut current = item.def_id;
// The immediate parent might not always be a module.

@@ -593,6 +596,12 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
}
current = parent;
} else {
+ debug!(
+ "{:?} has no parent (kind={:?}, original was {:?})",
+ current,
+ self.cx.tcx.def_kind(current),
+ item.def_id
+ );
break None;
}
}

@@ -697,11 +706,12 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
// This is an anchor to an element of the current page, nothing to do in here!
continue;
}
- (parts[0].to_owned(), Some(parts[1].to_owned()))
+ (parts[0], Some(parts[1].to_owned()))
} else {
- (parts[0].to_owned(), None)
+ (parts[0], None)
};
let resolved_self;
+ let link_text;
let mut path_str;
let disambiguator;
let (mut res, mut fragment) = {

@@ -718,6 +728,12 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
continue;
}

+ // We stripped `()` and `!` when parsing the disambiguator.
+ // Add them back to be displayed, but not prefix disambiguators.
+ link_text = disambiguator
+ .map(|d| d.display_for(path_str))
+ .unwrap_or_else(|| path_str.to_owned());
+
// In order to correctly resolve intra-doc-links we need to
// pick a base AST node to work from. If the documentation for
// this module came from an inner comment (//!) then we anchor

@@ -906,7 +922,12 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
if let Res::PrimTy(_) = res {
match disambiguator {
Some(Disambiguator::Primitive | Disambiguator::Namespace(_)) | None => {
- item.attrs.links.push((ori_link, None, fragment))
+ item.attrs.links.push(ItemLink {
+ link: ori_link,
+ link_text: path_str.to_owned(),
+ did: None,
+ fragment,
+ });
}
Some(other) => {
report_mismatch(other, Disambiguator::Primitive);

@@ -957,7 +978,12 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
}
}
let id = register_res(cx, res);
- item.attrs.links.push((ori_link, Some(id), fragment));
+ item.attrs.links.push(ItemLink {
+ link: ori_link,
+ link_text,
+ did: Some(id),
+ fragment,
+ });
}
}

@@ -985,6 +1011,18 @@ enum Disambiguator {
}

impl Disambiguator {
+ /// The text that should be displayed when the path is rendered as HTML.
+ ///
+ /// NOTE: `path` is not the original link given by the user, but a name suitable for passing to `resolve`.
+ fn display_for(&self, path: &str) -> String {
+ match self {
+ // FIXME: this will have different output if the user had `m!()` originally.
+ Self::Kind(DefKind::Macro(MacroKind::Bang)) => format!("{}!", path),
+ Self::Kind(DefKind::Fn) => format!("{}()", path),
+ _ => path.to_owned(),
+ }
+ }
+
/// (disambiguator, path_str)
fn from_str(link: &str) -> Result<(Self, &str), ()> {
use Disambiguator::{Kind, Namespace as NS, Primitive};

@@ -1037,7 +1075,7 @@ impl Disambiguator {
}

/// Return (description of the change, suggestion)
- fn display_for(self, path_str: &str) -> (&'static str, String) {
+ fn suggestion_for(self, path_str: &str) -> (&'static str, String) {
const PREFIX: &str = "prefix with the item kind";
const FUNCTION: &str = "add parentheses";
const MACRO: &str = "add an exclamation mark";

@@ -1292,7 +1330,7 @@ fn suggest_disambiguator(
sp: Option<rustc_span::Span>,
link_range: &Option<Range<usize>>,
) {
- let (action, mut suggestion) = disambiguator.display_for(path_str);
+ let (action, mut suggestion) = disambiguator.suggestion_for(path_str);
let help = format!("to link to the {}, {}", disambiguator.descr(), action);

if let Some(sp) = sp {

src/test/mir-opt/inline/inline-compatibility.rs (new file, 39 lines)
@@ -0,0 +1,39 @@
// Checks that only functions with compatible attributes are inlined.
//
// only-x86_64
// needs-sanitizer-address
// compile-flags: -Zsanitizer=address

#![crate_type = "lib"]
#![feature(no_sanitize)]
#![feature(target_feature_11)]

// EMIT_MIR inline_compatibility.inlined_target_feature.Inline.diff
#[target_feature(enable = "sse2")]
pub unsafe fn inlined_target_feature() {
target_feature();
}

// EMIT_MIR inline_compatibility.not_inlined_target_feature.Inline.diff
pub unsafe fn not_inlined_target_feature() {
target_feature();
}

// EMIT_MIR inline_compatibility.inlined_no_sanitize.Inline.diff
#[no_sanitize(address)]
pub unsafe fn inlined_no_sanitize() {
no_sanitize();
}

// EMIT_MIR inline_compatibility.not_inlined_no_sanitize.Inline.diff
pub unsafe fn not_inlined_no_sanitize() {
no_sanitize();
}

#[inline]
#[target_feature(enable = "sse2")]
pub unsafe fn target_feature() {}

#[inline]
#[no_sanitize(address, memory)]
pub unsafe fn no_sanitize() {}

@@ -0,0 +1,25 @@
- // MIR for `inlined_no_sanitize` before Inline
+ // MIR for `inlined_no_sanitize` after Inline

fn inlined_no_sanitize() -> () {
let mut _0: (); // return place in scope 0 at $DIR/inline-compatibility.rs:24:37: 24:37
let _1: (); // in scope 0 at $DIR/inline-compatibility.rs:25:5: 25:18
+ scope 1 {
+ }

bb0: {
StorageLive(_1); // scope 0 at $DIR/inline-compatibility.rs:25:5: 25:18
- _1 = no_sanitize() -> bb1; // scope 0 at $DIR/inline-compatibility.rs:25:5: 25:18
- // mir::Constant
- // + span: $DIR/inline-compatibility.rs:25:5: 25:16
- // + literal: Const { ty: unsafe fn() {no_sanitize}, val: Value(Scalar(<ZST>)) }
- }
-
- bb1: {
+ _1 = const (); // scope 1 at $DIR/inline-compatibility.rs:39:29: 39:31
StorageDead(_1); // scope 0 at $DIR/inline-compatibility.rs:25:18: 25:19
_0 = const (); // scope 0 at $DIR/inline-compatibility.rs:24:37: 26:2
return; // scope 0 at $DIR/inline-compatibility.rs:26:2: 26:2
}
}

@@ -0,0 +1,25 @@
- // MIR for `inlined_target_feature` before Inline
+ // MIR for `inlined_target_feature` after Inline

fn inlined_target_feature() -> () {
let mut _0: (); // return place in scope 0 at $DIR/inline-compatibility.rs:13:40: 13:40
let _1: (); // in scope 0 at $DIR/inline-compatibility.rs:14:5: 14:21
+ scope 1 {
+ }

bb0: {
StorageLive(_1); // scope 0 at $DIR/inline-compatibility.rs:14:5: 14:21
- _1 = target_feature() -> bb1; // scope 0 at $DIR/inline-compatibility.rs:14:5: 14:21
- // mir::Constant
- // + span: $DIR/inline-compatibility.rs:14:5: 14:19
- // + literal: Const { ty: unsafe fn() {target_feature}, val: Value(Scalar(<ZST>)) }
- }
-
- bb1: {
+ _1 = const (); // scope 1 at $DIR/inline-compatibility.rs:35:32: 35:34
StorageDead(_1); // scope 0 at $DIR/inline-compatibility.rs:14:21: 14:22
_0 = const (); // scope 0 at $DIR/inline-compatibility.rs:13:40: 15:2
return; // scope 0 at $DIR/inline-compatibility.rs:15:2: 15:2
}
}

@@ -0,0 +1,22 @@
- // MIR for `not_inlined_no_sanitize` before Inline
+ // MIR for `not_inlined_no_sanitize` after Inline

fn not_inlined_no_sanitize() -> () {
let mut _0: (); // return place in scope 0 at $DIR/inline-compatibility.rs:29:41: 29:41
let _1: (); // in scope 0 at $DIR/inline-compatibility.rs:30:5: 30:18

bb0: {
StorageLive(_1); // scope 0 at $DIR/inline-compatibility.rs:30:5: 30:18
_1 = no_sanitize() -> bb1; // scope 0 at $DIR/inline-compatibility.rs:30:5: 30:18
// mir::Constant
// + span: $DIR/inline-compatibility.rs:30:5: 30:16
// + literal: Const { ty: unsafe fn() {no_sanitize}, val: Value(Scalar(<ZST>)) }
}

bb1: {
StorageDead(_1); // scope 0 at $DIR/inline-compatibility.rs:30:18: 30:19
_0 = const (); // scope 0 at $DIR/inline-compatibility.rs:29:41: 31:2
return; // scope 0 at $DIR/inline-compatibility.rs:31:2: 31:2
}
}

@@ -0,0 +1,22 @@
- // MIR for `not_inlined_target_feature` before Inline
+ // MIR for `not_inlined_target_feature` after Inline

fn not_inlined_target_feature() -> () {
let mut _0: (); // return place in scope 0 at $DIR/inline-compatibility.rs:18:44: 18:44
let _1: (); // in scope 0 at $DIR/inline-compatibility.rs:19:5: 19:21

bb0: {
StorageLive(_1); // scope 0 at $DIR/inline-compatibility.rs:19:5: 19:21
_1 = target_feature() -> bb1; // scope 0 at $DIR/inline-compatibility.rs:19:5: 19:21
// mir::Constant
// + span: $DIR/inline-compatibility.rs:19:5: 19:19
// + literal: Const { ty: unsafe fn() {target_feature}, val: Value(Scalar(<ZST>)) }
}

bb1: {
StorageDead(_1); // scope 0 at $DIR/inline-compatibility.rs:19:21: 19:22
_0 = const (); // scope 0 at $DIR/inline-compatibility.rs:18:44: 20:2
return; // scope 0 at $DIR/inline-compatibility.rs:20:2: 20:2
}
}

src/test/rustdoc/auxiliary/intra-link-pub-use.rs (new file, 4 lines)
@@ -0,0 +1,4 @@
#![crate_name = "inner"]

/// Documentation, including a link to [std::ptr]
pub fn f() {}

src/test/rustdoc/intra-link-disambiguators-removed.rs (new file, 51 lines)
@@ -0,0 +1,51 @@
// ignore-tidy-linelength
#![deny(intra_doc_link_resolution_failure)]
// first try backticks
/// Trait: [`trait@Name`], fn: [`fn@Name`], [`Name`][`macro@Name`]
// @has intra_link_disambiguators_removed/struct.AtDisambiguator.html
// @has - '//a[@href="../intra_link_disambiguators_removed/trait.Name.html"][code]' "Name"
// @has - '//a[@href="../intra_link_disambiguators_removed/fn.Name.html"][code]' "Name"
// @has - '//a[@href="../intra_link_disambiguators_removed/macro.Name.html"][code]' "Name"
pub struct AtDisambiguator;

/// fn: [`Name()`], macro: [`Name!`]
// @has intra_link_disambiguators_removed/struct.SymbolDisambiguator.html
// @has - '//a[@href="../intra_link_disambiguators_removed/fn.Name.html"][code]' "Name()"
// @has - '//a[@href="../intra_link_disambiguators_removed/macro.Name.html"][code]' "Name!"
pub struct SymbolDisambiguator;

// Now make sure that backticks aren't added if they weren't already there
/// [fn@Name]
// @has intra_link_disambiguators_removed/trait.Name.html
// @has - '//a[@href="../intra_link_disambiguators_removed/fn.Name.html"]' "Name"
// @!has - '//a[@href="../intra_link_disambiguators_removed/fn.Name.html"][code]' "Name"

// FIXME: this will turn !() into ! alone
/// [Name!()]
// @has - '//a[@href="../intra_link_disambiguators_removed/macro.Name.html"]' "Name!"
pub trait Name {}

#[allow(non_snake_case)]

// Try collapsed reference links
/// [macro@Name][]
// @has intra_link_disambiguators_removed/fn.Name.html
// @has - '//a[@href="../intra_link_disambiguators_removed/macro.Name.html"]' "Name"

// Try links that have the same text as a generated URL
/// Weird URL aligned [../intra_link_disambiguators_removed/macro.Name.html][trait@Name]
// @has - '//a[@href="../intra_link_disambiguators_removed/trait.Name.html"]' "../intra_link_disambiguators_removed/macro.Name.html"
pub fn Name() {}

#[macro_export]
// Rustdoc doesn't currently handle links that have weird interspersing of inline code blocks.
/// [fn@Na`m`e]
// @has intra_link_disambiguators_removed/macro.Name.html
// @has - '//a[@href="../intra_link_disambiguators_removed/fn.Name.html"]' "fn@Name"

// It also doesn't handle any case where the code block isn't the whole link text:
/// [trait@`Name`]
// @has - '//a[@href="../intra_link_disambiguators_removed/trait.Name.html"]' "trait@Name"
macro_rules! Name {
() => ()
}

src/test/rustdoc/intra-link-pub-use.rs (new file, 27 lines)
@@ -0,0 +1,27 @@
// aux-build: intra-link-pub-use.rs
#![deny(broken_intra_doc_links)]
#![crate_name = "outer"]

extern crate inner;

/// [mod@std::env] [g]

// FIXME: This can't be tested because rustdoc doesn't show documentation on pub re-exports.
// Until then, comment out the `htmldocck` test.
// This test still does something; namely check that no incorrect errors are emitted when
// documenting the re-export.

// @has outer/index.html
// @ has - '//a[@href="https://doc.rust-lang.org/nightly/std/env/fn.var.html"]' "std::env"
// @ has - '//a[@href="../outer/fn.f.html"]' "g"
pub use f as g;

// FIXME: same as above
/// [std::env]
extern crate self as _;

// Make sure the documentation is actually correct by documenting an inlined re-export
/// [mod@std::env]
// @has outer/fn.f.html
// @has - '//a[@href="https://doc.rust-lang.org/nightly/std/env/index.html"]' "std::env"
pub use inner::f;

src/test/ui/mir/issue-76248.rs (new file, 29 lines)
@@ -0,0 +1,29 @@
// This used to ICE during codegen after MIR inlining of g into f.
// The root cause was a missing fold of length constant in Rvalue::Repeat.
// Regression test for #76248.
//
// build-pass
// compile-flags: -Zmir-opt-level=2

const N: usize = 1;

pub struct Elem<M> {
pub x: [usize; N],
pub m: M,
}

pub fn f() -> Elem<()> {
g(())
}

#[inline]
pub fn g<M>(m: M) -> Elem<M> {
Elem {
x: [0; N],
m,
}
}

pub fn main() {
f();
}

src/test/ui/type-alias-impl-trait/issue-72793.rs (new file, 27 lines)
@@ -0,0 +1,27 @@
// build-pass

// Regression test for #72793.
// FIXME: This still shows ICE with `-Zmir-opt-level=2`.

#![feature(type_alias_impl_trait)]

trait T { type Item; }

type Alias<'a> = impl T<Item = &'a ()>;

struct S;
impl<'a> T for &'a S {
type Item = &'a ();
}

fn filter_positive<'a>() -> Alias<'a> {
&S
}

fn with_positive(fun: impl Fn(Alias<'_>)) {
fun(filter_positive());
}

fn main() {
with_positive(|_| ());
}
