
Fix merge conflict with recent PR

Alexis Bourget 2020-07-19 22:15:44 +02:00
commit 471dd52d77
102 changed files with 3045 additions and 1295 deletions


@ -2821,6 +2821,13 @@ dependencies = [
"rls-span",
]
[[package]]
name = "rust-demangler"
version = "0.0.0"
dependencies = [
"rustc-demangle",
]
[[package]]
name = "rustbook"
version = "0.1.0"


@ -17,6 +17,7 @@ members = [
"src/tools/remote-test-client",
"src/tools/remote-test-server",
"src/tools/rust-installer",
"src/tools/rust-demangler",
"src/tools/cargo",
"src/tools/rustdoc",
"src/tools/rls",


@ -370,6 +370,7 @@ impl<'a> Builder<'a> {
tool::Cargo,
tool::Rls,
tool::RustAnalyzer,
tool::RustDemangler,
tool::Rustdoc,
tool::Clippy,
tool::CargoClippy,


@ -1022,6 +1022,10 @@ impl Step for Compiletest {
cmd.arg("--rustdoc-path").arg(builder.rustdoc(compiler));
}
if mode == "run-make" && suite.ends_with("fulldeps") {
cmd.arg("--rust-demangler-path").arg(builder.tool_exe(Tool::RustDemangler));
}
cmd.arg("--src-base").arg(builder.src.join("src/test").join(suite));
cmd.arg("--build-base").arg(testdir(builder, compiler.host).join(suite));
cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target));


@ -361,6 +361,7 @@ bootstrap_tool!(
Compiletest, "src/tools/compiletest", "compiletest", is_unstable_tool = true;
BuildManifest, "src/tools/build-manifest", "build-manifest";
RemoteTestClient, "src/tools/remote-test-client", "remote-test-client";
RustDemangler, "src/tools/rust-demangler", "rust-demangler";
RustInstaller, "src/tools/rust-installer", "fabricate", is_external_tool = true;
RustdocTheme, "src/tools/rustdoc-themes", "rustdoc-themes";
ExpandYamlAnchors, "src/tools/expand-yaml-anchors", "expand-yaml-anchors";


@ -1,34 +1,44 @@
FROM centos:5
# We use Debian 6 (glibc 2.11, kernel 2.6.32) as a common base for other
# distros that still need Rust support: RHEL 6 (glibc 2.12, kernel 2.6.32) and
# SLES 11 SP4 (glibc 2.11, kernel 3.0).
FROM debian:6
WORKDIR /build
# Centos 5 is EOL and is no longer available from the usual mirrors, so switch
# to http://vault.centos.org/
RUN sed -i 's/enabled=1/enabled=0/' /etc/yum/pluginconf.d/fastestmirror.conf
RUN sed -i 's/mirrorlist/#mirrorlist/' /etc/yum.repos.d/*.repo
RUN sed -i 's|#\(baseurl.*\)mirror.centos.org/centos/$releasever|\1vault.centos.org/5.11|' /etc/yum.repos.d/*.repo
# Debian 6 is EOL and no longer available from the usual mirrors,
# so we'll need to switch to http://archive.debian.org/
RUN sed -i '/updates/d' /etc/apt/sources.list && \
sed -i 's/httpredir/archive/' /etc/apt/sources.list
RUN yum upgrade -y && yum install -y \
curl \
RUN apt-get update && \
apt-get install --allow-unauthenticated -y --no-install-recommends \
automake \
bzip2 \
gcc \
gcc-c++ \
make \
glibc-devel \
perl \
zlib-devel \
ca-certificates \
curl \
file \
xz \
which \
pkgconfig \
g++ \
g++-multilib \
gcc \
gcc-multilib \
git \
lib32z1-dev \
libedit-dev \
libncurses-dev \
make \
patch \
perl \
pkg-config \
unzip \
wget \
autoconf \
gettext
xz-utils \
zlib1g-dev
ENV PATH=/rustroot/bin:$PATH
ENV LD_LIBRARY_PATH=/rustroot/lib64:/rustroot/lib
ENV LD_LIBRARY_PATH=/rustroot/lib64:/rustroot/lib32:/rustroot/lib
ENV PKG_CONFIG_PATH=/rustroot/lib/pkgconfig
WORKDIR /tmp
RUN mkdir /home/user
COPY host-x86_64/dist-x86_64-linux/shared.sh /tmp/
# We need a build of openssl which supports SNI to download artifacts from
@ -38,14 +48,14 @@ COPY host-x86_64/dist-x86_64-linux/shared.sh /tmp/
COPY host-x86_64/dist-x86_64-linux/build-openssl.sh /tmp/
RUN ./build-openssl.sh
# The `curl` binary on CentOS doesn't support SNI which is needed for fetching
# The `curl` binary on Debian 6 doesn't support SNI which is needed for fetching
# some https urls we have, so install a new version of libcurl + curl which is
# using the openssl we just built previously.
#
# Note that we also disable a bunch of optional features of curl that we don't
# really need.
COPY host-x86_64/dist-x86_64-linux/build-curl.sh /tmp/
RUN ./build-curl.sh
RUN ./build-curl.sh && apt-get remove -y curl
# binutils < 2.22 has a bug where the 32-bit executables it generates
# immediately segfault in Rust, so we need to install our own binutils.
@ -54,40 +64,24 @@ RUN ./build-curl.sh
COPY host-x86_64/dist-x86_64-linux/build-binutils.sh /tmp/
RUN ./build-binutils.sh
# libssh2 (a dependency of Cargo) requires cmake 2.8.11 or higher but CentOS
# only has 2.6.4, so build our own
COPY host-x86_64/dist-x86_64-linux/build-cmake.sh /tmp/
RUN ./build-cmake.sh
# Need a newer version of gcc than centos has to compile LLVM nowadays
# Need at least GCC 5.1 to compile LLVM nowadays
COPY host-x86_64/dist-x86_64-linux/build-gcc.sh /tmp/
RUN ./build-gcc.sh
RUN ./build-gcc.sh && apt-get remove -y gcc g++
# CentOS 5.5 has Python 2.4 by default, but LLVM needs 2.7+
# Debian 6 has Python 2.6 by default, but LLVM needs 2.7+
COPY host-x86_64/dist-x86_64-linux/build-python.sh /tmp/
RUN ./build-python.sh
# Now build LLVM+Clang 7, afterwards configuring further compilations to use the
# LLVM needs cmake 3.4.3 or higher, and is planning to raise to 3.13.4.
COPY host-x86_64/dist-x86_64-linux/build-cmake.sh /tmp/
RUN ./build-cmake.sh
# Now build LLVM+Clang, afterwards configuring further compilations to use the
# clang/clang++ compilers.
COPY host-x86_64/dist-x86_64-linux/build-clang.sh host-x86_64/dist-x86_64-linux/llvm-project-centos.patch /tmp/
COPY host-x86_64/dist-x86_64-linux/build-clang.sh /tmp/
RUN ./build-clang.sh
ENV CC=clang CXX=clang++
# Apparently CentOS 5.5 doesn't have `git` in yum, but we're gonna need it for
# cloning, so download and build it here.
COPY host-x86_64/dist-x86_64-linux/build-git.sh /tmp/
RUN ./build-git.sh
# for sanitizers, we need kernel headers files newer than the ones CentOS ships
# with so we install newer ones here
COPY host-x86_64/dist-x86_64-linux/build-headers.sh /tmp/
RUN ./build-headers.sh
# OpenSSL requires a more recent version of perl
# with so we install newer ones here
COPY host-x86_64/dist-x86_64-linux/build-perl.sh /tmp/
RUN ./build-perl.sh
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
@ -117,4 +111,11 @@ ENV CFLAGS -mstackrealign
# libcurl, instead it should compile its own.
ENV LIBCURL_NO_PKG_CONFIG 1
# There was a bad interaction between "old" 32-bit binaries on current 64-bit
# kernels with selinux enabled, where ASLR mmap would sometimes choose a low
# address and then block it for being below `vm.mmap_min_addr` -> `EACCES`.
# This is probably a kernel bug, but setting `ulimit -Hs` works around it.
# See also `src/ci/run.sh` where this takes effect.
ENV SET_HARD_RLIMIT_STACK 1
ENV DIST_REQUIRE_ALL_TOOLS 1


@ -1,34 +1,44 @@
FROM centos:5
# We use Debian 6 (glibc 2.11, kernel 2.6.32) as a common base for other
# distros that still need Rust support: RHEL 6 (glibc 2.12, kernel 2.6.32) and
# SLES 11 SP4 (glibc 2.11, kernel 3.0).
FROM debian:6
WORKDIR /build
# Centos 5 is EOL and is no longer available from the usual mirrors, so switch
# to http://vault.centos.org/
RUN sed -i 's/enabled=1/enabled=0/' /etc/yum/pluginconf.d/fastestmirror.conf
RUN sed -i 's/mirrorlist/#mirrorlist/' /etc/yum.repos.d/*.repo
RUN sed -i 's|#\(baseurl.*\)mirror.centos.org/centos/$releasever|\1vault.centos.org/5.11|' /etc/yum.repos.d/*.repo
# Debian 6 is EOL and no longer available from the usual mirrors,
# so we'll need to switch to http://archive.debian.org/
RUN sed -i '/updates/d' /etc/apt/sources.list && \
sed -i 's/httpredir/archive/' /etc/apt/sources.list
RUN yum upgrade -y && yum install -y \
curl \
RUN apt-get update && \
apt-get install --allow-unauthenticated -y --no-install-recommends \
automake \
bzip2 \
gcc \
gcc-c++ \
make \
glibc-devel \
perl \
zlib-devel \
ca-certificates \
curl \
file \
xz \
which \
pkgconfig \
g++ \
g++-multilib \
gcc \
gcc-multilib \
git \
lib32z1-dev \
libedit-dev \
libncurses-dev \
make \
patch \
perl \
pkg-config \
unzip \
wget \
autoconf \
gettext
xz-utils \
zlib1g-dev
ENV PATH=/rustroot/bin:$PATH
ENV LD_LIBRARY_PATH=/rustroot/lib64:/rustroot/lib
ENV LD_LIBRARY_PATH=/rustroot/lib64:/rustroot/lib32:/rustroot/lib
ENV PKG_CONFIG_PATH=/rustroot/lib/pkgconfig
WORKDIR /tmp
RUN mkdir /home/user
COPY host-x86_64/dist-x86_64-linux/shared.sh /tmp/
# We need a build of openssl which supports SNI to download artifacts from
@ -38,14 +48,14 @@ COPY host-x86_64/dist-x86_64-linux/shared.sh /tmp/
COPY host-x86_64/dist-x86_64-linux/build-openssl.sh /tmp/
RUN ./build-openssl.sh
# The `curl` binary on CentOS doesn't support SNI which is needed for fetching
# The `curl` binary on Debian 6 doesn't support SNI which is needed for fetching
# some https urls we have, so install a new version of libcurl + curl which is
# using the openssl we just built previously.
#
# Note that we also disable a bunch of optional features of curl that we don't
# really need.
COPY host-x86_64/dist-x86_64-linux/build-curl.sh /tmp/
RUN ./build-curl.sh
RUN ./build-curl.sh && apt-get remove -y curl
# binutils < 2.22 has a bug where the 32-bit executables it generates
# immediately segfault in Rust, so we need to install our own binutils.
@ -54,40 +64,24 @@ RUN ./build-curl.sh
COPY host-x86_64/dist-x86_64-linux/build-binutils.sh /tmp/
RUN ./build-binutils.sh
# libssh2 (a dependency of Cargo) requires cmake 2.8.11 or higher but CentOS
# only has 2.6.4, so build our own
COPY host-x86_64/dist-x86_64-linux/build-cmake.sh /tmp/
RUN ./build-cmake.sh
# Build a version of gcc capable of building LLVM 6
# Need at least GCC 5.1 to compile LLVM nowadays
COPY host-x86_64/dist-x86_64-linux/build-gcc.sh /tmp/
RUN ./build-gcc.sh
RUN ./build-gcc.sh && apt-get remove -y gcc g++
# CentOS 5.5 has Python 2.4 by default, but LLVM needs 2.7+
# Debian 6 has Python 2.6 by default, but LLVM needs 2.7+
COPY host-x86_64/dist-x86_64-linux/build-python.sh /tmp/
RUN ./build-python.sh
# Now build LLVM+Clang 7, afterwards configuring further compilations to use the
# LLVM needs cmake 3.4.3 or higher, and is planning to raise to 3.13.4.
COPY host-x86_64/dist-x86_64-linux/build-cmake.sh /tmp/
RUN ./build-cmake.sh
# Now build LLVM+Clang, afterwards configuring further compilations to use the
# clang/clang++ compilers.
COPY host-x86_64/dist-x86_64-linux/build-clang.sh host-x86_64/dist-x86_64-linux/llvm-project-centos.patch /tmp/
COPY host-x86_64/dist-x86_64-linux/build-clang.sh /tmp/
RUN ./build-clang.sh
ENV CC=clang CXX=clang++
# Apparently CentOS 5.5 doesn't have `git` in yum, but we're gonna need it for
# cloning, so download and build it here.
COPY host-x86_64/dist-x86_64-linux/build-git.sh /tmp/
RUN ./build-git.sh
# for sanitizers, we need kernel headers files newer than the ones CentOS ships
# with so we install newer ones here
COPY host-x86_64/dist-x86_64-linux/build-headers.sh /tmp/
RUN ./build-headers.sh
# OpenSSL requires a more recent version of perl
# with so we install newer ones here
COPY host-x86_64/dist-x86_64-linux/build-perl.sh /tmp/
RUN ./build-perl.sh
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh


@ -12,9 +12,6 @@ cd llvm-project
curl -L https://github.com/llvm/llvm-project/archive/$LLVM.tar.gz | \
tar xzf - --strip-components=1
yum install -y patch
patch -Np1 < ../llvm-project-centos.patch
mkdir clang-build
cd clang-build


@ -3,14 +3,15 @@
set -ex
source shared.sh
curl https://cmake.org/files/v3.6/cmake-3.6.3.tar.gz | tar xzf -
CMAKE=3.13.4
curl -L https://github.com/Kitware/CMake/releases/download/v$CMAKE/cmake-$CMAKE.tar.gz | tar xzf -
mkdir cmake-build
cd cmake-build
hide_output ../cmake-3.6.3/configure --prefix=/rustroot
hide_output ../cmake-$CMAKE/configure --prefix=/rustroot
hide_output make -j10
hide_output make install
cd ..
rm -rf cmake-build
rm -rf cmake-3.6.3
rm -rf cmake-$CMAKE


@ -36,4 +36,3 @@ hide_output make install
cd ..
rm -rf curl-build
rm -rf curl-$VERSION
yum erase -y curl


@ -37,4 +37,3 @@ ln -s gcc /rustroot/bin/cc
cd ..
rm -rf gcc-build
rm -rf gcc-$GCC
yum erase -y gcc gcc-c++ binutils


@ -20,6 +20,18 @@ if [ -f /proc/sys/kernel/core_pattern ]; then
ulimit -c unlimited
fi
# There was a bad interaction between "old" 32-bit binaries on current 64-bit
# kernels with selinux enabled, where ASLR mmap would sometimes choose a low
# address and then block it for being below `vm.mmap_min_addr` -> `EACCES`.
# This is probably a kernel bug, but setting `ulimit -Hs` works around it.
# See also `dist-i686-linux` where this setting is enabled.
if [ "$SET_HARD_RLIMIT_STACK" = "1" ]; then
rlimit_stack=$(ulimit -Ss)
if [ "$rlimit_stack" != "" ]; then
ulimit -Hs "$rlimit_stack"
fi
fi
ci_dir=`cd $(dirname $0) && pwd`
source "$ci_dir/shared.sh"


@ -47,9 +47,9 @@ use UnderflowResult::*;
/// any other key, as determined by the [`Ord`] trait, changes while it is in the map. This is
/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
///
/// [`Ord`]: ../../std/cmp/trait.Ord.html
/// [`Cell`]: ../../std/cell/struct.Cell.html
/// [`RefCell`]: ../../std/cell/struct.RefCell.html
/// [`Ord`]: core::cmp::Ord
/// [`Cell`]: core::cell::Cell
/// [`RefCell`]: core::cell::RefCell
///
/// # Examples
///
@ -256,8 +256,7 @@ where
/// This `struct` is created by the [`iter`] method on [`BTreeMap`]. See its
/// documentation for more.
///
/// [`iter`]: struct.BTreeMap.html#method.iter
/// [`BTreeMap`]: struct.BTreeMap.html
/// [`iter`]: BTreeMap::iter
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, K: 'a, V: 'a> {
range: Range<'a, K, V>,
@ -276,8 +275,7 @@ impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for Iter<'_, K, V> {
/// This `struct` is created by the [`iter_mut`] method on [`BTreeMap`]. See its
/// documentation for more.
///
/// [`iter_mut`]: struct.BTreeMap.html#method.iter_mut
/// [`BTreeMap`]: struct.BTreeMap.html
/// [`iter_mut`]: BTreeMap::iter_mut
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Debug)]
pub struct IterMut<'a, K: 'a, V: 'a> {
@ -290,8 +288,7 @@ pub struct IterMut<'a, K: 'a, V: 'a> {
/// This `struct` is created by the [`into_iter`] method on [`BTreeMap`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
/// [`into_iter`]: struct.BTreeMap.html#method.into_iter
/// [`BTreeMap`]: struct.BTreeMap.html
/// [`into_iter`]: IntoIterator::into_iter
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<K, V> {
front: Option<Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>>,
@ -315,8 +312,7 @@ impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for IntoIter<K, V> {
/// This `struct` is created by the [`keys`] method on [`BTreeMap`]. See its
/// documentation for more.
///
/// [`keys`]: struct.BTreeMap.html#method.keys
/// [`BTreeMap`]: struct.BTreeMap.html
/// [`keys`]: BTreeMap::keys
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Keys<'a, K: 'a, V: 'a> {
inner: Iter<'a, K, V>,
@ -334,8 +330,7 @@ impl<K: fmt::Debug, V> fmt::Debug for Keys<'_, K, V> {
/// This `struct` is created by the [`values`] method on [`BTreeMap`]. See its
/// documentation for more.
///
/// [`values`]: struct.BTreeMap.html#method.values
/// [`BTreeMap`]: struct.BTreeMap.html
/// [`values`]: BTreeMap::values
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Values<'a, K: 'a, V: 'a> {
inner: Iter<'a, K, V>,
@ -353,8 +348,7 @@ impl<K, V: fmt::Debug> fmt::Debug for Values<'_, K, V> {
/// This `struct` is created by the [`values_mut`] method on [`BTreeMap`]. See its
/// documentation for more.
///
/// [`values_mut`]: struct.BTreeMap.html#method.values_mut
/// [`BTreeMap`]: struct.BTreeMap.html
/// [`values_mut`]: BTreeMap::values_mut
#[stable(feature = "map_values_mut", since = "1.10.0")]
#[derive(Debug)]
pub struct ValuesMut<'a, K: 'a, V: 'a> {
@ -366,8 +360,7 @@ pub struct ValuesMut<'a, K: 'a, V: 'a> {
/// This `struct` is created by the [`range`] method on [`BTreeMap`]. See its
/// documentation for more.
///
/// [`range`]: struct.BTreeMap.html#method.range
/// [`BTreeMap`]: struct.BTreeMap.html
/// [`range`]: BTreeMap::range
#[stable(feature = "btree_range", since = "1.17.0")]
pub struct Range<'a, K: 'a, V: 'a> {
front: Option<Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>>,
@ -386,8 +379,7 @@ impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for Range<'_, K, V> {
/// This `struct` is created by the [`range_mut`] method on [`BTreeMap`]. See its
/// documentation for more.
///
/// [`range_mut`]: struct.BTreeMap.html#method.range_mut
/// [`BTreeMap`]: struct.BTreeMap.html
/// [`range_mut`]: BTreeMap::range_mut
#[stable(feature = "btree_range", since = "1.17.0")]
pub struct RangeMut<'a, K: 'a, V: 'a> {
front: Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>>,
@ -412,8 +404,7 @@ impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for RangeMut<'_, K, V> {
///
/// This `enum` is constructed from the [`entry`] method on [`BTreeMap`].
///
/// [`BTreeMap`]: struct.BTreeMap.html
/// [`entry`]: struct.BTreeMap.html#method.entry
/// [`entry`]: BTreeMap::entry
#[stable(feature = "rust1", since = "1.0.0")]
pub enum Entry<'a, K: 'a, V: 'a> {
/// A vacant entry.

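The BTreeMap hunks above, and most of the library documentation hunks that follow, replace rustdoc's old path-based link definitions (relative HTML paths such as ../../std/cmp/trait.Ord.html) with intra-doc links written as Rust paths. As a rough illustration only, with a hypothetical SortedVec item that is not part of this commit, the new style lets rustdoc resolve the target from the path:

/// A tiny sorted container, used here only to illustrate intra-doc links.
///
/// Items must not change their ordering (as defined by [`Ord`]) while stored.
/// The old style spelled the link target as a relative HTML path like
/// ../../std/cmp/trait.Ord.html; the definition below uses a Rust path instead.
///
/// [`Ord`]: core::cmp::Ord
pub struct SortedVec<T: Ord> {
    items: Vec<T>,
}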

@ -22,9 +22,9 @@ use super::Recover;
/// to any other item, as determined by the [`Ord`] trait, changes while it is in the set. This is
/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
///
/// [`Ord`]: ../../std/cmp/trait.Ord.html
/// [`Cell`]: ../../std/cell/struct.Cell.html
/// [`RefCell`]: ../../std/cell/struct.RefCell.html
/// [`Ord`]: core::cmp::Ord
/// [`Cell`]: core::cell::Cell
/// [`RefCell`]: core::cell::RefCell
///
/// # Examples
///


@ -240,8 +240,6 @@ impl str {
/// While doing so, it attempts to find matches of a pattern. If it finds any, it
/// replaces them with the replacement string slice.
///
/// [`String`]: string/struct.String.html
///
/// # Examples
///
/// Basic usage:
@ -280,8 +278,6 @@ impl str {
/// While doing so, it attempts to find matches of a pattern. If it finds any, it
/// replaces them with the replacement string slice at most `count` times.
///
/// [`String`]: string/struct.String.html
///
/// # Examples
///
/// Basic usage:
@ -324,8 +320,6 @@ impl str {
/// the case, this function returns a [`String`] instead of modifying the
/// parameter in-place.
///
/// [`String`]: string/struct.String.html
///
/// # Examples
///
/// Basic usage:
@ -411,8 +405,6 @@ impl str {
/// the case, this function returns a [`String`] instead of modifying the
/// parameter in-place.
///
/// [`String`]: string/struct.String.html
///
/// # Examples
///
/// Basic usage:
@ -459,8 +451,7 @@ impl str {
/// Converts a [`Box<str>`] into a [`String`] without copying or allocating.
///
/// [`String`]: string/struct.String.html
/// [`Box<str>`]: boxed/struct.Box.html
/// [`Box<str>`]: Box
///
/// # Examples
///
@ -485,8 +476,6 @@ impl str {
///
/// This function will panic if the capacity would overflow.
///
/// [`String`]: string/struct.String.html
///
/// # Examples
///
/// Basic usage:
@ -525,7 +514,7 @@ impl str {
/// assert_eq!("GRüßE, JüRGEN ❤", s.to_ascii_uppercase());
/// ```
///
/// [`make_ascii_uppercase`]: #method.make_ascii_uppercase
/// [`make_ascii_uppercase`]: str::make_ascii_uppercase
/// [`to_uppercase`]: #method.to_uppercase
#[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
#[inline]
@ -555,7 +544,7 @@ impl str {
/// assert_eq!("grüße, jürgen ❤", s.to_ascii_lowercase());
/// ```
///
/// [`make_ascii_lowercase`]: #method.make_ascii_lowercase
/// [`make_ascii_lowercase`]: str::make_ascii_lowercase
/// [`to_lowercase`]: #method.to_lowercase
#[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
#[inline]


@ -4,8 +4,6 @@
//! [`ToString`]s, and several error types that may result from working with
//! [`String`]s.
//!
//! [`ToString`]: trait.ToString.html
//!
//! # Examples
//!
//! There are multiple ways to create a new [`String`] from a string literal:
@ -20,8 +18,6 @@
//! You can create a new [`String`] from an existing one by concatenating with
//! `+`:
//!
//! [`String`]: struct.String.html
//!
//! ```
//! let s = "Hello".to_string();
//!
@ -67,11 +63,11 @@ use crate::vec::Vec;
/// contents of the string. It has a close relationship with its borrowed
/// counterpart, the primitive [`str`].
///
/// [`str`]: ../../std/primitive.str.html
///
/// # Examples
///
/// You can create a `String` from a literal string with [`String::from`]:
/// You can create a `String` from [a literal string][str] with [`String::from`]:
///
/// [`String::from`]: From::from
///
/// ```
/// let hello = String::from("Hello, world!");
@ -87,10 +83,8 @@ use crate::vec::Vec;
/// hello.push_str("orld!");
/// ```
///
/// [`String::from`]: #method.from
/// [`char`]: ../../std/primitive.char.html
/// [`push`]: #method.push
/// [`push_str`]: #method.push_str
/// [`push`]: String::push
/// [`push_str`]: String::push_str
///
/// If you have a vector of UTF-8 bytes, you can create a `String` from it with
/// the [`from_utf8`] method:
@ -105,7 +99,7 @@ use crate::vec::Vec;
/// assert_eq!("💖", sparkle_heart);
/// ```
///
/// [`from_utf8`]: #method.from_utf8
/// [`from_utf8`]: String::from_utf8
///
/// # UTF-8
///
@ -128,8 +122,8 @@ use crate::vec::Vec;
/// The [`bytes`] and [`chars`] methods return iterators over the first
/// two, respectively.
///
/// [`bytes`]: #method.bytes
/// [`chars`]: #method.chars
/// [`bytes`]: str::bytes
/// [`chars`]: str::chars
///
/// # Deref
///
@ -215,9 +209,9 @@ use crate::vec::Vec;
/// assert_eq!(String::from("Once upon a time..."), s);
/// ```
///
/// [`as_ptr`]: #method.as_ptr
/// [`len`]: #method.len
/// [`capacity`]: #method.capacity
/// [`as_ptr`]: str::as_ptr
/// [`len`]: String::len
/// [`capacity`]: String::capacity
///
/// If a `String` has enough capacity, adding elements to it will not
/// re-allocate. For example, consider this program:
@ -259,7 +253,7 @@ use crate::vec::Vec;
/// }
/// ```
///
/// [`with_capacity`]: #method.with_capacity
/// [`with_capacity`]: String::with_capacity
///
/// We end up with a different output:
///
@ -274,9 +268,9 @@ use crate::vec::Vec;
///
/// Here, there's no need to allocate more memory inside the loop.
///
/// [`&str`]: ../../std/primitive.str.html
/// [`Deref`]: ../../std/ops/trait.Deref.html
/// [`as_str()`]: struct.String.html#method.as_str
/// [`&str`]: str
/// [`Deref`]: core::ops::Deref
/// [`as_str()`]: String::as_str
#[derive(PartialOrd, Eq, Ord)]
#[cfg_attr(not(test), rustc_diagnostic_item = "string_type")]
#[stable(feature = "rust1", since = "1.0.0")]
@ -291,20 +285,18 @@ pub struct String {
/// [`into_bytes`] method will give back the byte vector that was used in the
/// conversion attempt.
///
/// [`from_utf8`]: struct.String.html#method.from_utf8
/// [`String`]: struct.String.html
/// [`into_bytes`]: struct.FromUtf8Error.html#method.into_bytes
/// [`from_utf8`]: String::from_utf8
/// [`into_bytes`]: FromUtf8Error::into_bytes
///
/// The [`Utf8Error`] type provided by [`std::str`] represents an error that may
/// occur when converting a slice of [`u8`]s to a [`&str`]. In this sense, it's
/// an analogue to `FromUtf8Error`, and you can get one from a `FromUtf8Error`
/// through the [`utf8_error`] method.
///
/// [`Utf8Error`]: ../../std/str/struct.Utf8Error.html
/// [`std::str`]: ../../std/str/index.html
/// [`u8`]: ../../std/primitive.u8.html
/// [`&str`]: ../../std/primitive.str.html
/// [`utf8_error`]: #method.utf8_error
/// [`Utf8Error`]: core::str::Utf8Error
/// [`std::str`]: core::str
/// [`&str`]: str
/// [`utf8_error`]: Self::utf8_error
///
/// # Examples
///
@ -330,9 +322,7 @@ pub struct FromUtf8Error {
///
/// This type is the error type for the [`from_utf16`] method on [`String`].
///
/// [`from_utf16`]: struct.String.html#method.from_utf16
/// [`String`]: struct.String.html
///
/// [`from_utf16`]: String::from_utf16
/// # Examples
///
/// Basic usage:
@ -358,7 +348,7 @@ impl String {
/// consider the [`with_capacity`] method to prevent excessive
/// re-allocation.
///
/// [`with_capacity`]: #method.with_capacity
/// [`with_capacity`]: String::with_capacity
///
/// # Examples
///
@ -383,12 +373,12 @@ impl String {
/// appending a bunch of data to the `String`, reducing the number of
/// reallocations it needs to do.
///
/// [`capacity`]: #method.capacity
/// [`capacity`]: String::capacity
///
/// If the given capacity is `0`, no allocation will occur, and this method
/// is identical to the [`new`] method.
///
/// [`new`]: #method.new
/// [`new`]: String::new
///
/// # Examples
///
@ -479,15 +469,10 @@ impl String {
/// See the docs for [`FromUtf8Error`] for more details on what you can do
/// with this error.
///
/// [`from_utf8_unchecked`]: struct.String.html#method.from_utf8_unchecked
/// [`String`]: struct.String.html
/// [`u8`]: ../../std/primitive.u8.html
/// [`Vec<u8>`]: ../../std/vec/struct.Vec.html
/// [`&str`]: ../../std/primitive.str.html
/// [`str::from_utf8`]: ../../std/str/fn.from_utf8.html
/// [`into_bytes`]: struct.String.html#method.into_bytes
/// [`FromUtf8Error`]: struct.FromUtf8Error.html
/// [`Err`]: ../../std/result/enum.Result.html#variant.Err
/// [`from_utf8_unchecked`]: String::from_utf8_unchecked
/// [`Vec<u8>`]: crate::vec::Vec
/// [`&str`]: str
/// [`into_bytes`]: String::into_bytes
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn from_utf8(vec: Vec<u8>) -> Result<String, FromUtf8Error> {
@ -506,16 +491,15 @@ impl String {
/// `from_utf8_lossy()` will replace any invalid UTF-8 sequences with
/// [`U+FFFD REPLACEMENT CHARACTER`][U+FFFD], which looks like this: <20>
///
/// [`u8`]: ../../std/primitive.u8.html
/// [byteslice]: ../../std/primitive.slice.html
/// [U+FFFD]: ../char/constant.REPLACEMENT_CHARACTER.html
/// [U+FFFD]: core::char::REPLACEMENT_CHARACTER
///
/// If you are sure that the byte slice is valid UTF-8, and you don't want
/// to incur the overhead of the conversion, there is an unsafe version
/// of this function, [`from_utf8_unchecked`], which has the same behavior
/// but skips the checks.
///
/// [`from_utf8_unchecked`]: struct.String.html#method.from_utf8_unchecked
/// [`from_utf8_unchecked`]: String::from_utf8_unchecked
///
/// This function returns a [`Cow<'a, str>`]. If our byte slice is invalid
/// UTF-8, then we need to insert the replacement characters, which will
@ -523,7 +507,7 @@ impl String {
/// it's already valid UTF-8, we don't need a new allocation. This return
/// type allows us to handle both cases.
///
/// [`Cow<'a, str>`]: ../../std/borrow/enum.Cow.html
/// [`Cow<'a, str>`]: crate::borrow::Cow
///
/// # Examples
///
@ -583,8 +567,6 @@ impl String {
/// Decode a UTF-16 encoded vector `v` into a `String`, returning [`Err`]
/// if `v` contains any invalid data.
///
/// [`Err`]: ../../std/result/enum.Result.html#variant.Err
///
/// # Examples
///
/// Basic usage:
@ -623,9 +605,9 @@ impl String {
/// `from_utf16_lossy` returns a `String` since the UTF-16 to UTF-8
/// conversion requires a memory allocation.
///
/// [`from_utf8_lossy`]: #method.from_utf8_lossy
/// [`Cow<'a, str>`]: ../borrow/enum.Cow.html
/// [U+FFFD]: ../char/constant.REPLACEMENT_CHARACTER.html
/// [`from_utf8_lossy`]: String::from_utf8_lossy
/// [`Cow<'a, str>`]: crate::borrow::Cow
/// [U+FFFD]: core::char::REPLACEMENT_CHARACTER
///
/// # Examples
///
@ -659,7 +641,7 @@ impl String {
/// into a `String` with the [`from_raw_parts`] function, allowing
/// the destructor to perform the cleanup.
///
/// [`from_raw_parts`]: #method.from_raw_parts
/// [`from_raw_parts`]: String::from_raw_parts
///
/// # Examples
///
@ -732,7 +714,7 @@ impl String {
///
/// See the safe version, [`from_utf8`], for more details.
///
/// [`from_utf8`]: struct.String.html#method.from_utf8
/// [`from_utf8`]: String::from_utf8
///
/// # Safety
///
@ -867,8 +849,7 @@ impl String {
///
/// Panics if the new capacity overflows [`usize`].
///
/// [`reserve_exact`]: struct.String.html#method.reserve_exact
/// [`usize`]: ../../std/primitive.usize.html
/// [`reserve_exact`]: String::reserve_exact
///
/// # Examples
///
@ -911,7 +892,7 @@ impl String {
/// Consider using the [`reserve`] method unless you absolutely know
/// better than the allocator.
///
/// [`reserve`]: #method.reserve
/// [`reserve`]: String::reserve
///
/// # Panics
///
@ -1076,8 +1057,6 @@ impl String {
/// Appends the given [`char`] to the end of this `String`.
///
/// [`char`]: ../../std/primitive.char.html
///
/// # Examples
///
/// Basic usage:
@ -1104,7 +1083,7 @@ impl String {
///
/// The inverse of this method is [`from_utf8`].
///
/// [`from_utf8`]: #method.from_utf8
/// [`from_utf8`]: String::from_utf8
///
/// # Examples
///
@ -1133,8 +1112,6 @@ impl String {
///
/// Panics if `new_len` does not lie on a [`char`] boundary.
///
/// [`char`]: ../../std/primitive.char.html
///
/// # Examples
///
/// Basic usage:
@ -1159,8 +1136,6 @@ impl String {
///
/// Returns [`None`] if this `String` is empty.
///
/// [`None`]: ../../std/option/enum.Option.html#variant.None
///
/// # Examples
///
/// Basic usage:
@ -1195,8 +1170,6 @@ impl String {
/// Panics if `idx` is larger than or equal to the `String`'s length,
/// or if it does not lie on a [`char`] boundary.
///
/// [`char`]: ../../std/primitive.char.html
///
/// # Examples
///
/// Basic usage:
@ -1297,8 +1270,6 @@ impl String {
/// Panics if `idx` is larger than the `String`'s length, or if it does not
/// lie on a [`char`] boundary.
///
/// [`char`]: ../../std/primitive.char.html
///
/// # Examples
///
/// Basic usage:
@ -1346,8 +1317,6 @@ impl String {
/// Panics if `idx` is larger than the `String`'s length, or if it does not
/// lie on a [`char`] boundary.
///
/// [`char`]: ../../std/primitive.char.html
///
/// # Examples
///
/// Basic usage:
@ -1507,8 +1476,6 @@ impl String {
/// Panics if the starting point or end point do not lie on a [`char`]
/// boundary, or if they're out of bounds.
///
/// [`char`]: ../../std/primitive.char.html
///
/// # Examples
///
/// Basic usage:
@ -1567,9 +1534,6 @@ impl String {
/// Panics if the starting point or end point do not lie on a [`char`]
/// boundary, or if they're out of bounds.
///
/// [`char`]: ../../std/primitive.char.html
/// [`Vec::splice`]: ../../std/vec/struct.Vec.html#method.splice
///
/// # Examples
///
/// Basic usage:
@ -1610,9 +1574,6 @@ impl String {
///
/// This will drop any excess capacity.
///
/// [`Box`]: ../../std/boxed/struct.Box.html
/// [`str`]: ../../std/primitive.str.html
///
/// # Examples
///
/// Basic usage:
@ -1680,10 +1641,8 @@ impl FromUtf8Error {
/// an analogue to `FromUtf8Error`. See its documentation for more details
/// on using it.
///
/// [`Utf8Error`]: ../../std/str/struct.Utf8Error.html
/// [`std::str`]: ../../std/str/index.html
/// [`u8`]: ../../std/primitive.u8.html
/// [`&str`]: ../../std/primitive.str.html
/// [`std::str`]: core::str
/// [`&str`]: str
///
/// # Examples
///
@ -2187,7 +2146,7 @@ impl ops::DerefMut for String {
///
/// This alias exists for backwards compatibility, and may be eventually deprecated.
///
/// [`Infallible`]: ../../core/convert/enum.Infallible.html
/// [`Infallible`]: core::convert::Infallible
#[stable(feature = "str_parse_error", since = "1.5.0")]
pub type ParseError = core::convert::Infallible;
@ -2207,7 +2166,7 @@ impl FromStr for String {
/// [`Display`] should be implemented instead, and you get the `ToString`
/// implementation for free.
///
/// [`Display`]: ../../std/fmt/trait.Display.html
/// [`Display`]: fmt::Display
#[stable(feature = "rust1", since = "1.0.0")]
pub trait ToString {
/// Converts the given value to a `String`.
@ -2465,8 +2424,7 @@ impl fmt::Write for String {
/// This struct is created by the [`drain`] method on [`String`]. See its
/// documentation for more.
///
/// [`drain`]: struct.String.html#method.drain
/// [`String`]: struct.String.html
/// [`drain`]: String::drain
#[stable(feature = "drain", since = "1.6.0")]
pub struct Drain<'a> {
/// Will be used as &'a mut String in the destructor

View file

@ -45,7 +45,8 @@ use crate::intrinsics;
/// ```
#[inline]
#[stable(feature = "unreachable", since = "1.27.0")]
pub unsafe fn unreachable_unchecked() -> ! {
#[rustc_const_unstable(feature = "const_unreachable_unchecked", issue = "53188")]
pub const unsafe fn unreachable_unchecked() -> ! {
// SAFETY: the safety contract for `intrinsics::unreachable` must
// be upheld by the caller.
unsafe { intrinsics::unreachable() }

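The hunk above makes `unreachable_unchecked` a `const unsafe fn`, gated on the unstable `const_unreachable_unchecked` feature tracked in issue 53188. A minimal sketch of what that enables, using hypothetical code that is not part of this commit and assuming a toolchain where the call is usable in const context:

use std::hint::unreachable_unchecked;

// Hypothetical const-evaluable decoder: callers promise the last arm can
// never be reached, so the optimizer is free to drop it.
const unsafe fn decode_flag(bit: u8) -> bool {
    match bit {
        0 => false,
        1 => true,
        // SAFETY: callers guarantee `bit` is 0 or 1.
        _ => unreachable_unchecked(),
    }
}

fn main() {
    // SAFETY: 1 is a valid flag value.
    assert!(unsafe { decode_flag(1) });
}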

@ -932,6 +932,7 @@ extern "rust-intrinsic" {
///
/// The stabilized version of this intrinsic is
/// [`std::hint::unreachable_unchecked`](../../std/hint/fn.unreachable_unchecked.html).
#[rustc_const_unstable(feature = "const_unreachable_unchecked", issue = "53188")]
pub fn unreachable() -> !;
/// Informs the optimizer that a condition is always true.
@ -1957,8 +1958,14 @@ extern "rust-intrinsic" {
/// Internal placeholder for injecting code coverage counters when the "instrument-coverage"
/// option is enabled. The placeholder is replaced with `llvm.instrprof.increment` during code
/// generation.
#[cfg(not(bootstrap))]
#[lang = "count_code_region"]
pub fn count_code_region(index: u32, start_byte_pos: u32, end_byte_pos: u32);
pub fn count_code_region(
function_source_hash: u64,
index: u32,
start_byte_pos: u32,
end_byte_pos: u32,
);
/// Internal marker for code coverage expressions, injected into the MIR when the
/// "instrument-coverage" option is enabled. This intrinsic is not converted into a
@ -1966,6 +1973,8 @@ extern "rust-intrinsic" {
/// "coverage map", which is injected into the generated code, as additional data.
/// This marker identifies a code region and two other counters or counter expressions
/// whose sum is the number of times the code region was executed.
#[cfg(not(bootstrap))]
#[lang = "coverage_counter_add"]
pub fn coverage_counter_add(
index: u32,
left_index: u32,
@ -1977,6 +1986,8 @@ extern "rust-intrinsic" {
/// This marker identifies a code region and two other counters or counter expressions
/// whose difference is the number of times the code region was executed.
/// (See `coverage_counter_add` for more information.)
#[cfg(not(bootstrap))]
#[lang = "coverage_counter_subtract"]
pub fn coverage_counter_subtract(
index: u32,
left_index: u32,


@ -92,6 +92,7 @@
#![feature(const_slice_ptr_len)]
#![feature(const_type_name)]
#![feature(const_likely)]
#![feature(const_unreachable_unchecked)]
#![feature(custom_inner_attributes)]
#![feature(decl_macro)]
#![feature(doc_cfg)]


@ -4,7 +4,7 @@
//!
//! For more details, see the [`std::str`] module.
//!
//! [`std::str`]: ../../std/str/index.html
//! [`std::str`]: self
#![stable(feature = "rust1", since = "1.0.0")]
@ -163,13 +163,11 @@ Section: Creating a string
/// Errors which can occur when attempting to interpret a sequence of [`u8`]
/// as a string.
///
/// [`u8`]: ../../std/primitive.u8.html
///
/// As such, the `from_utf8` family of functions and methods for both [`String`]s
/// and [`&str`]s make use of this error, for example.
///
/// [`String`]: ../../std/string/struct.String.html#method.from_utf8
/// [`&str`]: ../../std/str/fn.from_utf8.html
/// [`&str`]: from_utf8
///
/// # Examples
///


@ -9,3 +9,7 @@ pub use self::poll::Poll;
mod wake;
#[stable(feature = "futures_api", since = "1.36.0")]
pub use self::wake::{Context, RawWaker, RawWakerVTable, Waker};
mod ready;
#[unstable(feature = "ready_macro", issue = "70922")]
pub use ready::ready;

src/libcore/task/ready.rs (new file, 60 lines)

@ -0,0 +1,60 @@
/// Extracts the successful type of a `Poll<T>`.
///
/// This macro bakes in propagation of `Pending` signals by returning early.
///
/// # Examples
///
/// ```
/// #![feature(future_readiness_fns)]
/// #![feature(ready_macro)]
///
/// use core::task::{ready, Context, Poll};
/// use core::future::{self, Future};
/// use core::pin::Pin;
///
/// pub fn do_poll(cx: &mut Context<'_>) -> Poll<()> {
/// let mut fut = future::ready(42);
/// let fut = Pin::new(&mut fut);
///
/// let num = ready!(fut.poll(cx));
/// # drop(num);
/// // ... use num
///
/// Poll::Ready(())
/// }
/// ```
///
/// The `ready!` call expands to:
///
/// ```
/// # #![feature(future_readiness_fns)]
/// # #![feature(ready_macro)]
/// #
/// # use core::task::{Context, Poll};
/// # use core::future::{self, Future};
/// # use core::pin::Pin;
/// #
/// # pub fn do_poll(cx: &mut Context<'_>) -> Poll<()> {
/// # let mut fut = future::ready(42);
/// # let fut = Pin::new(&mut fut);
/// #
/// let num = match fut.poll(cx) {
/// Poll::Ready(t) => t,
/// Poll::Pending => return Poll::Pending,
/// };
/// # drop(num);
/// # // ... use num
/// #
/// # Poll::Ready(())
/// # }
/// ```
#[unstable(feature = "ready_macro", issue = "70922")]
#[rustc_macro_transparency = "semitransparent"]
pub macro ready($e:expr) {
match $e {
$crate::task::Poll::Ready(t) => t,
$crate::task::Poll::Pending => {
return $crate::task::Poll::Pending;
}
}
}


@ -133,6 +133,9 @@ fn set_probestack(cx: &CodegenCx<'ll, '_>, llfn: &'ll Value) {
return;
}
// FIXME(richkadel): Make sure probestack plays nice with `-Z instrument-coverage`
// or disable it if not, similar to above early exits.
// Flag our internal `__rust_probestack` function as the stack probe symbol.
// This is defined in the `compiler-builtins` crate for each architecture.
llvm::AddFunctionAttrStringValue(


@ -144,17 +144,18 @@ pub fn compile_codegen_unit(
}
}
// Finalize code coverage by injecting the coverage map. Note, the coverage map will
// also be added to the `llvm.used` variable, created next.
if cx.sess().opts.debugging_opts.instrument_coverage {
cx.coverageinfo_finalize();
}
// Create the llvm.used variable
// This variable has type [N x i8*] and is stored in the llvm.metadata section
if !cx.used_statics().borrow().is_empty() {
cx.create_used_variable()
}
// Finalize code coverage by injecting the coverage map
if cx.sess().opts.debugging_opts.instrument_coverage {
cx.coverageinfo_finalize();
}
// Finalize debuginfo
if cx.sess().opts.debuginfo != DebugInfo::None {
cx.debuginfo_finalize();


@ -1060,7 +1060,7 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
fn_name, hash, num_counters, index
);
let llfn = unsafe { llvm::LLVMRustGetInstrprofIncrementIntrinsic(self.cx().llmod) };
let llfn = unsafe { llvm::LLVMRustGetInstrProfIncrementIntrinsic(self.cx().llmod) };
let args = &[fn_name, hash, num_counters, index];
let args = self.check_call("call", llfn, args);


@ -493,10 +493,14 @@ impl StaticMethods for CodegenCx<'ll, 'tcx> {
}
if attrs.flags.contains(CodegenFnAttrFlags::USED) {
// This static will be stored in the llvm.used variable which is an array of i8*
let cast = llvm::LLVMConstPointerCast(g, self.type_i8p());
self.used_statics.borrow_mut().push(cast);
self.add_used_global(g);
}
}
}
/// Add a global value to a list to be stored in the `llvm.used` variable, an array of i8*.
fn add_used_global(&self, global: &'ll Value) {
let cast = unsafe { llvm::LLVMConstPointerCast(global, self.type_i8p()) };
self.used_statics.borrow_mut().push(cast);
}
}


@ -0,0 +1,274 @@
use crate::llvm;
use crate::common::CodegenCx;
use crate::coverageinfo;
use log::debug;
use rustc_codegen_ssa::coverageinfo::map::*;
use rustc_codegen_ssa::traits::{BaseTypeMethods, ConstMethods, MiscMethods};
use rustc_data_structures::fx::FxHashMap;
use rustc_llvm::RustString;
use rustc_middle::ty::Instance;
use rustc_middle::{bug, mir};
use std::collections::BTreeMap;
use std::ffi::CString;
use std::path::PathBuf;
// FIXME(richkadel): Complete all variations of generating and exporting the coverage map to LLVM.
// The current implementation is an initial foundation with basic capabilities (Counters, but not
// CounterExpressions, etc.).
/// Generates and exports the Coverage Map.
///
/// This Coverage Map complies with Coverage Mapping Format version 3 (zero-based encoded as 2),
/// as defined at [LLVM Code Coverage Mapping Format](https://github.com/rust-lang/llvm-project/blob/llvmorg-8.0.0/llvm/docs/CoverageMappingFormat.rst#llvm-code-coverage-mapping-format)
/// and published in Rust's current (July 2020) fork of LLVM. This version is supported by the
/// LLVM coverage tools (`llvm-profdata` and `llvm-cov`) bundled with Rust's fork of LLVM.
///
/// Consequently, Rust's bundled version of Clang also generates Coverage Maps compliant with
/// version 3. Clang's implementation of Coverage Map generation was referenced when implementing
/// this Rust version, and though the format documentation is very explicit and detailed, some
/// undocumented details in Clang's implementation (that may or may not be important) were also
/// replicated for Rust's Coverage Map.
pub fn finalize<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>) {
let mut coverage_writer = CoverageMappingWriter::new(cx);
let function_coverage_map = cx.coverage_context().take_function_coverage_map();
// Encode coverage mappings and generate function records
let mut function_records = Vec::<&'ll llvm::Value>::new();
let coverage_mappings_buffer = llvm::build_byte_buffer(|coverage_mappings_buffer| {
for (instance, function_coverage) in function_coverage_map.into_iter() {
if let Some(function_record) = coverage_writer.write_function_mappings_and_record(
instance,
function_coverage,
coverage_mappings_buffer,
) {
function_records.push(function_record);
}
}
});
// Encode all filenames covered in this module, ordered by `file_id`
let filenames_buffer = llvm::build_byte_buffer(|filenames_buffer| {
coverageinfo::write_filenames_section_to_buffer(
&coverage_writer.filenames,
filenames_buffer,
);
});
if coverage_mappings_buffer.len() > 0 {
// Generate the LLVM IR representation of the coverage map and store it in a well-known
// global constant.
coverage_writer.write_coverage_map(
function_records,
filenames_buffer,
coverage_mappings_buffer,
);
}
}
struct CoverageMappingWriter<'a, 'll, 'tcx> {
cx: &'a CodegenCx<'ll, 'tcx>,
filenames: Vec<CString>,
filename_to_index: FxHashMap<CString, u32>,
}
impl<'a, 'll, 'tcx> CoverageMappingWriter<'a, 'll, 'tcx> {
fn new(cx: &'a CodegenCx<'ll, 'tcx>) -> Self {
Self { cx, filenames: Vec::new(), filename_to_index: FxHashMap::<CString, u32>::default() }
}
/// For the given function, get the coverage region data, stream it to the given buffer, and
/// then generate and return a new function record.
fn write_function_mappings_and_record(
&mut self,
instance: Instance<'tcx>,
mut function_coverage: FunctionCoverage,
coverage_mappings_buffer: &RustString,
) -> Option<&'ll llvm::Value> {
let cx = self.cx;
let coverageinfo: &mir::CoverageInfo = cx.tcx.coverageinfo(instance.def_id());
debug!(
"Generate coverage map for: {:?}, num_counters: {}, num_expressions: {}",
instance, coverageinfo.num_counters, coverageinfo.num_expressions
);
debug_assert!(coverageinfo.num_counters > 0);
let regions_in_file_order = function_coverage.regions_in_file_order(cx.sess().source_map());
if regions_in_file_order.len() == 0 {
return None;
}
// Stream the coverage mapping regions for the function (`instance`) to the buffer, and
// compute the data byte size used.
let old_len = coverage_mappings_buffer.len();
self.regions_to_mappings(regions_in_file_order, coverage_mappings_buffer);
let mapping_data_size = coverage_mappings_buffer.len() - old_len;
debug_assert!(mapping_data_size > 0);
let mangled_function_name = cx.tcx.symbol_name(instance).to_string();
let name_ref = coverageinfo::compute_hash(&mangled_function_name);
let function_source_hash = function_coverage.source_hash();
// Generate and return the function record
let name_ref_val = cx.const_u64(name_ref);
let mapping_data_size_val = cx.const_u32(mapping_data_size as u32);
let func_hash_val = cx.const_u64(function_source_hash);
Some(cx.const_struct(
&[name_ref_val, mapping_data_size_val, func_hash_val],
/*packed=*/ true,
))
}
/// For each coverage region, extract its coverage data from the earlier coverage analysis.
/// Use LLVM APIs to convert the data into buffered bytes compliant with the LLVM Coverage
/// Mapping format.
fn regions_to_mappings(
&mut self,
regions_in_file_order: BTreeMap<PathBuf, BTreeMap<CoverageLoc, (usize, CoverageKind)>>,
coverage_mappings_buffer: &RustString,
) {
let mut virtual_file_mapping = Vec::new();
let mut mapping_regions = coverageinfo::SmallVectorCounterMappingRegion::new();
let mut expressions = coverageinfo::SmallVectorCounterExpression::new();
for (file_id, (file_path, file_coverage_regions)) in
regions_in_file_order.into_iter().enumerate()
{
let file_id = file_id as u32;
let filename = CString::new(file_path.to_string_lossy().to_string())
.expect("null error converting filename to C string");
debug!(" file_id: {} = '{:?}'", file_id, filename);
let filenames_index = match self.filename_to_index.get(&filename) {
Some(index) => *index,
None => {
let index = self.filenames.len() as u32;
self.filenames.push(filename.clone());
self.filename_to_index.insert(filename, index);
index
}
};
virtual_file_mapping.push(filenames_index);
let mut mapping_indexes = vec![0 as u32; file_coverage_regions.len()];
for (mapping_index, (region_id, _)) in file_coverage_regions.values().enumerate() {
mapping_indexes[*region_id] = mapping_index as u32;
}
for (region_loc, (region_id, region_kind)) in file_coverage_regions.into_iter() {
let mapping_index = mapping_indexes[region_id];
match region_kind {
CoverageKind::Counter => {
debug!(
" Counter {}, file_id: {}, region_loc: {}",
mapping_index, file_id, region_loc
);
mapping_regions.push_from(
mapping_index,
file_id,
region_loc.start_line,
region_loc.start_col,
region_loc.end_line,
region_loc.end_col,
);
}
CoverageKind::CounterExpression(lhs, op, rhs) => {
debug!(
" CounterExpression {} = {} {:?} {}, file_id: {}, region_loc: {:?}",
mapping_index, lhs, op, rhs, file_id, region_loc,
);
mapping_regions.push_from(
mapping_index,
file_id,
region_loc.start_line,
region_loc.start_col,
region_loc.end_line,
region_loc.end_col,
);
expressions.push_from(op, lhs, rhs);
}
CoverageKind::Unreachable => {
debug!(
" Unreachable region, file_id: {}, region_loc: {:?}",
file_id, region_loc,
);
bug!("Unreachable region not expected and not yet handled!")
// FIXME(richkadel): implement and call
// mapping_regions.push_from(...) for unreachable regions
}
}
}
}
// Encode and append the current function's coverage mapping data
coverageinfo::write_mapping_to_buffer(
virtual_file_mapping,
expressions,
mapping_regions,
coverage_mappings_buffer,
);
}
fn write_coverage_map(
self,
function_records: Vec<&'ll llvm::Value>,
filenames_buffer: Vec<u8>,
mut coverage_mappings_buffer: Vec<u8>,
) {
let cx = self.cx;
// Concatenate the encoded filenames and encoded coverage mappings, and add additional zero
// bytes as-needed to ensure 8-byte alignment.
let mut coverage_size = coverage_mappings_buffer.len();
let filenames_size = filenames_buffer.len();
let remaining_bytes =
(filenames_size + coverage_size) % coverageinfo::COVMAP_VAR_ALIGN_BYTES;
if remaining_bytes > 0 {
let pad = coverageinfo::COVMAP_VAR_ALIGN_BYTES - remaining_bytes;
coverage_mappings_buffer.append(&mut [0].repeat(pad));
coverage_size += pad;
}
let filenames_and_coverage_mappings = [filenames_buffer, coverage_mappings_buffer].concat();
let filenames_and_coverage_mappings_val =
cx.const_bytes(&filenames_and_coverage_mappings[..]);
debug!(
"cov map: n_records = {}, filenames_size = {}, coverage_size = {}, 0-based version = {}",
function_records.len(),
filenames_size,
coverage_size,
coverageinfo::mapping_version()
);
// Create the coverage data header
let n_records_val = cx.const_u32(function_records.len() as u32);
let filenames_size_val = cx.const_u32(filenames_size as u32);
let coverage_size_val = cx.const_u32(coverage_size as u32);
let version_val = cx.const_u32(coverageinfo::mapping_version());
let cov_data_header_val = cx.const_struct(
&[n_records_val, filenames_size_val, coverage_size_val, version_val],
/*packed=*/ false,
);
// Create the function records array
let name_ref_from_u64 = cx.type_i64();
let mapping_data_size_from_u32 = cx.type_i32();
let func_hash_from_u64 = cx.type_i64();
let function_record_ty = cx.type_struct(
&[name_ref_from_u64, mapping_data_size_from_u32, func_hash_from_u64],
/*packed=*/ true,
);
let function_records_val = cx.const_array(function_record_ty, &function_records[..]);
// Create the complete LLVM coverage data value to add to the LLVM IR
let cov_data_val = cx.const_struct(
&[cov_data_header_val, function_records_val, filenames_and_coverage_mappings_val],
/*packed=*/ false,
);
// Save the coverage data value to LLVM IR
coverageinfo::save_map_to_mod(cx, cov_data_val);
}
}


@ -1,67 +1,44 @@
use crate::llvm;
use crate::builder::Builder;
use crate::common::CodegenCx;
use libc::c_uint;
use log::debug;
use rustc_codegen_ssa::coverageinfo::map::*;
use rustc_codegen_ssa::traits::{CoverageInfoBuilderMethods, CoverageInfoMethods};
use rustc_codegen_ssa::traits::{
BaseTypeMethods, CoverageInfoBuilderMethods, CoverageInfoMethods, StaticMethods,
};
use rustc_data_structures::fx::FxHashMap;
use rustc_llvm::RustString;
use rustc_middle::ty::Instance;
use std::cell::RefCell;
use std::ffi::CString;
pub mod mapgen;
const COVMAP_VAR_ALIGN_BYTES: usize = 8;
/// A context object for maintaining all state needed by the coverageinfo module.
pub struct CrateCoverageContext<'tcx> {
// Coverage region data for each instrumented function identified by DefId.
pub(crate) coverage_regions: RefCell<FxHashMap<Instance<'tcx>, FunctionCoverageRegions>>,
pub(crate) function_coverage_map: RefCell<FxHashMap<Instance<'tcx>, FunctionCoverage>>,
}
impl<'tcx> CrateCoverageContext<'tcx> {
pub fn new() -> Self {
Self { coverage_regions: Default::default() }
Self { function_coverage_map: Default::default() }
}
}
/// Generates and exports the Coverage Map.
// FIXME(richkadel): Actually generate and export the coverage map to LLVM.
// The current implementation is actually just debug messages to show the data is available.
pub fn finalize(cx: &CodegenCx<'_, '_>) {
let coverage_regions = &*cx.coverage_context().coverage_regions.borrow();
for instance in coverage_regions.keys() {
let coverageinfo = cx.tcx.coverageinfo(instance.def_id());
debug_assert!(coverageinfo.num_counters > 0);
debug!(
"Generate coverage map for: {:?}, hash: {}, num_counters: {}",
instance, coverageinfo.hash, coverageinfo.num_counters
);
let function_coverage_regions = &coverage_regions[instance];
for (index, region) in function_coverage_regions.indexed_regions() {
match region.kind {
CoverageKind::Counter => debug!(
" Counter {}, for {}..{}",
index, region.coverage_span.start_byte_pos, region.coverage_span.end_byte_pos
),
CoverageKind::CounterExpression(lhs, op, rhs) => debug!(
" CounterExpression {} = {} {:?} {}, for {}..{}",
index,
lhs,
op,
rhs,
region.coverage_span.start_byte_pos,
region.coverage_span.end_byte_pos
),
}
}
for unreachable in function_coverage_regions.unreachable_regions() {
debug!(
" Unreachable code region: {}..{}",
unreachable.start_byte_pos, unreachable.end_byte_pos
);
}
pub fn take_function_coverage_map(&self) -> FxHashMap<Instance<'tcx>, FunctionCoverage> {
self.function_coverage_map.replace(FxHashMap::default())
}
}
impl CoverageInfoMethods for CodegenCx<'ll, 'tcx> {
fn coverageinfo_finalize(&self) {
finalize(self)
mapgen::finalize(self)
}
}
@ -69,20 +46,22 @@ impl CoverageInfoBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> {
fn add_counter_region(
&mut self,
instance: Instance<'tcx>,
function_source_hash: u64,
index: u32,
start_byte_pos: u32,
end_byte_pos: u32,
) {
debug!(
"adding counter to coverage map: instance={:?}, index={}, byte range {}..{}",
instance, index, start_byte_pos, end_byte_pos,
);
let mut coverage_regions = self.coverage_context().coverage_regions.borrow_mut();
coverage_regions.entry(instance).or_default().add_counter(
index,
start_byte_pos,
end_byte_pos,
"adding counter to coverage_regions: instance={:?}, function_source_hash={}, index={}, byte range {}..{}",
instance, function_source_hash, index, start_byte_pos, end_byte_pos,
);
let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut();
coverage_regions
.entry(instance)
.or_insert_with(|| {
FunctionCoverage::with_coverageinfo(self.tcx.coverageinfo(instance.def_id()))
})
.add_counter(function_source_hash, index, start_byte_pos, end_byte_pos);
}
fn add_counter_expression_region(
@ -96,18 +75,16 @@ impl CoverageInfoBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> {
end_byte_pos: u32,
) {
debug!(
"adding counter expression to coverage map: instance={:?}, index={}, {} {:?} {}, byte range {}..{}",
"adding counter expression to coverage_regions: instance={:?}, index={}, {} {:?} {}, byte range {}..{}",
instance, index, lhs, op, rhs, start_byte_pos, end_byte_pos,
);
let mut coverage_regions = self.coverage_context().coverage_regions.borrow_mut();
coverage_regions.entry(instance).or_default().add_counter_expression(
index,
lhs,
op,
rhs,
start_byte_pos,
end_byte_pos,
);
let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut();
coverage_regions
.entry(instance)
.or_insert_with(|| {
FunctionCoverage::with_coverageinfo(self.tcx.coverageinfo(instance.def_id()))
})
.add_counter_expression(index, lhs, op, rhs, start_byte_pos, end_byte_pos);
}
fn add_unreachable_region(
@ -117,10 +94,175 @@ impl CoverageInfoBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> {
end_byte_pos: u32,
) {
debug!(
"adding unreachable code to coverage map: instance={:?}, byte range {}..{}",
"adding unreachable code to coverage_regions: instance={:?}, byte range {}..{}",
instance, start_byte_pos, end_byte_pos,
);
let mut coverage_regions = self.coverage_context().coverage_regions.borrow_mut();
coverage_regions.entry(instance).or_default().add_unreachable(start_byte_pos, end_byte_pos);
let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut();
coverage_regions
.entry(instance)
.or_insert_with(|| {
FunctionCoverage::with_coverageinfo(self.tcx.coverageinfo(instance.def_id()))
})
.add_unreachable(start_byte_pos, end_byte_pos);
}
}
/// This struct wraps an opaque reference to the C++ template instantiation of
/// `llvm::SmallVector<coverage::CounterExpression>`. Each `coverage::CounterExpression` object is
/// constructed from primitive-typed arguments, and pushed to the `SmallVector`, in the C++
/// implementation of `LLVMRustCoverageSmallVectorCounterExpressionAdd()` (see
/// `src/rustllvm/CoverageMappingWrapper.cpp`).
pub struct SmallVectorCounterExpression<'a> {
pub raw: &'a mut llvm::coverageinfo::SmallVectorCounterExpression<'a>,
}
impl SmallVectorCounterExpression<'a> {
pub fn new() -> Self {
SmallVectorCounterExpression {
raw: unsafe { llvm::LLVMRustCoverageSmallVectorCounterExpressionCreate() },
}
}
pub fn as_ptr(&self) -> *const llvm::coverageinfo::SmallVectorCounterExpression<'a> {
self.raw
}
pub fn push_from(
&mut self,
kind: rustc_codegen_ssa::coverageinfo::CounterOp,
left_index: u32,
right_index: u32,
) {
unsafe {
llvm::LLVMRustCoverageSmallVectorCounterExpressionAdd(
&mut *(self.raw as *mut _),
kind,
left_index,
right_index,
)
}
}
}
impl Drop for SmallVectorCounterExpression<'a> {
fn drop(&mut self) {
unsafe {
llvm::LLVMRustCoverageSmallVectorCounterExpressionDispose(&mut *(self.raw as *mut _));
}
}
}
/// This struct wraps an opaque reference to the C++ template instantiation of
/// `llvm::SmallVector<coverage::CounterMappingRegion>`. Each `coverage::CounterMappingRegion`
/// object is constructed from primitive-typed arguments, and pushed to the `SmallVector`, in the
/// C++ implementation of `LLVMRustCoverageSmallVectorCounterMappingRegionAdd()` (see
/// `src/rustllvm/CoverageMappingWrapper.cpp`).
pub struct SmallVectorCounterMappingRegion<'a> {
pub raw: &'a mut llvm::coverageinfo::SmallVectorCounterMappingRegion<'a>,
}
impl SmallVectorCounterMappingRegion<'a> {
pub fn new() -> Self {
SmallVectorCounterMappingRegion {
raw: unsafe { llvm::LLVMRustCoverageSmallVectorCounterMappingRegionCreate() },
}
}
pub fn as_ptr(&self) -> *const llvm::coverageinfo::SmallVectorCounterMappingRegion<'a> {
self.raw
}
pub fn push_from(
&mut self,
index: u32,
file_id: u32,
line_start: u32,
column_start: u32,
line_end: u32,
column_end: u32,
) {
unsafe {
llvm::LLVMRustCoverageSmallVectorCounterMappingRegionAdd(
&mut *(self.raw as *mut _),
index,
file_id,
line_start,
column_start,
line_end,
column_end,
)
}
}
}
impl Drop for SmallVectorCounterMappingRegion<'a> {
fn drop(&mut self) {
unsafe {
llvm::LLVMRustCoverageSmallVectorCounterMappingRegionDispose(
&mut *(self.raw as *mut _),
);
}
}
}
pub(crate) fn write_filenames_section_to_buffer(filenames: &Vec<CString>, buffer: &RustString) {
let c_str_vec = filenames.iter().map(|cstring| cstring.as_ptr()).collect::<Vec<_>>();
unsafe {
llvm::LLVMRustCoverageWriteFilenamesSectionToBuffer(
c_str_vec.as_ptr(),
c_str_vec.len(),
buffer,
);
}
}
pub(crate) fn write_mapping_to_buffer(
virtual_file_mapping: Vec<u32>,
expressions: SmallVectorCounterExpression<'_>,
mapping_regions: SmallVectorCounterMappingRegion<'_>,
buffer: &RustString,
) {
unsafe {
llvm::LLVMRustCoverageWriteMappingToBuffer(
virtual_file_mapping.as_ptr(),
virtual_file_mapping.len() as c_uint,
expressions.as_ptr(),
mapping_regions.as_ptr(),
buffer,
);
}
}
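As an illustration of how these FFI wrappers and `write_mapping_to_buffer()` fit together, here is a minimal sketch (not code from this change; it assumes `CounterOp` is imported, and the indexes and source coordinates are made-up placeholder values):
// Illustrative only: assemble expression and region SmallVectors, then
// serialize the coverage mapping into a byte buffer.
let mut expressions = SmallVectorCounterExpression::new();
expressions.push_from(CounterOp::Add, /*left_index=*/ 0, /*right_index=*/ 1);
let mut mapping_regions = SmallVectorCounterMappingRegion::new();
mapping_regions.push_from(/*index=*/ 0, /*file_id=*/ 0, /*line_start=*/ 3, /*column_start=*/ 1, /*line_end=*/ 10, /*column_end=*/ 2);
let _coverage_mapping_buffer = llvm::build_byte_buffer(|buffer| {
    write_mapping_to_buffer(vec![0], expressions, mapping_regions, buffer);
});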
pub(crate) fn compute_hash(name: &str) -> u64 {
let name = CString::new(name).expect("null error converting hashable name to C string");
unsafe { llvm::LLVMRustCoverageComputeHash(name.as_ptr()) }
}
pub(crate) fn mapping_version() -> u32 {
unsafe { llvm::LLVMRustCoverageMappingVersion() }
}
pub(crate) fn save_map_to_mod<'ll, 'tcx>(
cx: &CodegenCx<'ll, 'tcx>,
cov_data_val: &'ll llvm::Value,
) {
let covmap_var_name = llvm::build_string(|s| unsafe {
llvm::LLVMRustCoverageWriteMappingVarNameToString(s);
})
.expect("Rust Coverage Mapping var name failed UTF-8 conversion");
debug!("covmap var name: {:?}", covmap_var_name);
let covmap_section_name = llvm::build_string(|s| unsafe {
llvm::LLVMRustCoverageWriteSectionNameToString(cx.llmod, s);
})
.expect("Rust Coverage section name failed UTF-8 conversion");
debug!("covmap section name: {:?}", covmap_section_name);
let llglobal = llvm::add_global(cx.llmod, cx.val_ty(cov_data_val), &covmap_var_name);
llvm::set_initializer(llglobal, cov_data_val);
llvm::set_global_constant(llglobal, true);
llvm::set_linkage(llglobal, llvm::Linkage::InternalLinkage);
llvm::set_section(llglobal, &covmap_section_name);
llvm::set_alignment(llglobal, COVMAP_VAR_ALIGN_BYTES);
cx.add_used_global(llglobal);
}

View file

@ -90,45 +90,64 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
args: &Vec<Operand<'tcx>>,
caller_instance: ty::Instance<'tcx>,
) -> bool {
match intrinsic {
sym::count_code_region => {
use coverage::count_code_region_args::*;
self.add_counter_region(
caller_instance,
op_to_u32(&args[COUNTER_INDEX]),
op_to_u32(&args[START_BYTE_POS]),
op_to_u32(&args[END_BYTE_POS]),
);
true // Also inject the counter increment in the backend
if self.tcx.sess.opts.debugging_opts.instrument_coverage {
// Add the coverage information from the MIR to the Codegen context. Some coverage
// intrinsics are used only to pass along the coverage information (they return `false`
// from `is_codegen_intrinsic()`), but `count_code_region` is also converted into an
// LLVM intrinsic to increment a coverage counter.
match intrinsic {
sym::count_code_region => {
use coverage::count_code_region_args::*;
self.add_counter_region(
caller_instance,
op_to_u64(&args[FUNCTION_SOURCE_HASH]),
op_to_u32(&args[COUNTER_INDEX]),
op_to_u32(&args[START_BYTE_POS]),
op_to_u32(&args[END_BYTE_POS]),
);
return true; // Also inject the counter increment in the backend
}
sym::coverage_counter_add | sym::coverage_counter_subtract => {
use coverage::coverage_counter_expression_args::*;
self.add_counter_expression_region(
caller_instance,
op_to_u32(&args[COUNTER_EXPRESSION_INDEX]),
op_to_u32(&args[LEFT_INDEX]),
if intrinsic == sym::coverage_counter_add {
CounterOp::Add
} else {
CounterOp::Subtract
},
op_to_u32(&args[RIGHT_INDEX]),
op_to_u32(&args[START_BYTE_POS]),
op_to_u32(&args[END_BYTE_POS]),
);
return false; // Does not inject backend code
}
sym::coverage_unreachable => {
use coverage::coverage_unreachable_args::*;
self.add_unreachable_region(
caller_instance,
op_to_u32(&args[START_BYTE_POS]),
op_to_u32(&args[END_BYTE_POS]),
);
return false; // Does not inject backend code
}
_ => {}
}
sym::coverage_counter_add | sym::coverage_counter_subtract => {
use coverage::coverage_counter_expression_args::*;
self.add_counter_expression_region(
caller_instance,
op_to_u32(&args[COUNTER_EXPRESSION_INDEX]),
op_to_u32(&args[LEFT_INDEX]),
if intrinsic == sym::coverage_counter_add {
CounterOp::Add
} else {
CounterOp::Subtract
},
op_to_u32(&args[RIGHT_INDEX]),
op_to_u32(&args[START_BYTE_POS]),
op_to_u32(&args[END_BYTE_POS]),
);
false // Does not inject backend code
} else {
// NOT self.tcx.sess.opts.debugging_opts.instrument_coverage
if intrinsic == sym::count_code_region {
// An external crate may have been pre-compiled with coverage instrumentation, and
// some references from the current crate to the external crate might carry along
// the call terminators to coverage intrinsics, like `count_code_region` (for
// example, when instantiating a generic function). If the current crate has
// `instrument_coverage` disabled, the `count_code_region` call terminators should
// be ignored.
return false; // Do not inject coverage counters inlined from external crates
}
sym::coverage_unreachable => {
use coverage::coverage_unreachable_args::*;
self.add_unreachable_region(
caller_instance,
op_to_u32(&args[START_BYTE_POS]),
op_to_u32(&args[END_BYTE_POS]),
);
false // Does not inject backend code
}
_ => true, // Unhandled intrinsics should be passed to `codegen_intrinsic_call()`
}
true // Unhandled intrinsics should be passed to `codegen_intrinsic_call()`
}
fn codegen_intrinsic_call(
@ -197,12 +216,13 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
let coverageinfo = tcx.coverageinfo(caller_instance.def_id());
let mangled_fn = tcx.symbol_name(caller_instance);
let (mangled_fn_name, _len_val) = self.const_str(Symbol::intern(mangled_fn.name));
let hash = self.const_u64(coverageinfo.hash);
let num_counters = self.const_u32(coverageinfo.num_counters);
use coverage::count_code_region_args::*;
let hash = args[FUNCTION_SOURCE_HASH].immediate();
let index = args[COUNTER_INDEX].immediate();
debug!(
"count_code_region to LLVM intrinsic instrprof.increment(fn_name={}, hash={:?}, num_counters={:?}, index={:?})",
"translating Rust intrinsic `count_code_region()` to LLVM intrinsic: \
instrprof.increment(fn_name={}, hash={:?}, num_counters={:?}, index={:?})",
mangled_fn.name, hash, num_counters, index,
);
self.instrprof_increment(mangled_fn_name, hash, num_counters, index)
@ -2222,3 +2242,7 @@ fn float_type_width(ty: Ty<'_>) -> Option<u64> {
fn op_to_u32<'tcx>(op: &Operand<'tcx>) -> u32 {
Operand::scalar_from_const(op).to_u32().expect("Scalar is u32")
}
fn op_to_u64<'tcx>(op: &Operand<'tcx>) -> u64 {
Operand::scalar_from_const(op).to_u64().expect("Scalar is u64")
}

View file

@ -1,6 +1,8 @@
#![allow(non_camel_case_types)]
#![allow(non_upper_case_globals)]
use super::coverageinfo::{SmallVectorCounterExpression, SmallVectorCounterMappingRegion};
use super::debuginfo::{
DIArray, DIBasicType, DIBuilder, DICompositeType, DIDerivedType, DIDescriptor, DIEnumerator,
DIFile, DIFlags, DIGlobalVariableExpression, DILexicalBlock, DINameSpace, DISPFlags, DIScope,
@ -650,6 +652,16 @@ pub struct Linker<'a>(InvariantOpaque<'a>);
pub type DiagnosticHandler = unsafe extern "C" fn(&DiagnosticInfo, *mut c_void);
pub type InlineAsmDiagHandler = unsafe extern "C" fn(&SMDiagnostic, *const c_void, c_uint);
pub mod coverageinfo {
use super::InvariantOpaque;
#[repr(C)]
pub struct SmallVectorCounterExpression<'a>(InvariantOpaque<'a>);
#[repr(C)]
pub struct SmallVectorCounterMappingRegion<'a>(InvariantOpaque<'a>);
}
pub mod debuginfo {
use super::{InvariantOpaque, Metadata};
use bitflags::bitflags;
@ -1365,7 +1377,7 @@ extern "C" {
// Miscellaneous instructions
pub fn LLVMBuildPhi(B: &Builder<'a>, Ty: &'a Type, Name: *const c_char) -> &'a Value;
pub fn LLVMRustGetInstrprofIncrementIntrinsic(M: &Module) -> &'a Value;
pub fn LLVMRustGetInstrProfIncrementIntrinsic(M: &Module) -> &'a Value;
pub fn LLVMRustBuildCall(
B: &Builder<'a>,
Fn: &'a Value,
@ -1633,6 +1645,58 @@ extern "C" {
ConstraintsLen: size_t,
) -> bool;
pub fn LLVMRustCoverageSmallVectorCounterExpressionCreate()
-> &'a mut SmallVectorCounterExpression<'a>;
pub fn LLVMRustCoverageSmallVectorCounterExpressionDispose(
Container: &'a mut SmallVectorCounterExpression<'a>,
);
pub fn LLVMRustCoverageSmallVectorCounterExpressionAdd(
Container: &mut SmallVectorCounterExpression<'a>,
Kind: rustc_codegen_ssa::coverageinfo::CounterOp,
LeftIndex: c_uint,
RightIndex: c_uint,
);
pub fn LLVMRustCoverageSmallVectorCounterMappingRegionCreate()
-> &'a mut SmallVectorCounterMappingRegion<'a>;
pub fn LLVMRustCoverageSmallVectorCounterMappingRegionDispose(
Container: &'a mut SmallVectorCounterMappingRegion<'a>,
);
pub fn LLVMRustCoverageSmallVectorCounterMappingRegionAdd(
Container: &mut SmallVectorCounterMappingRegion<'a>,
Index: c_uint,
FileID: c_uint,
LineStart: c_uint,
ColumnStart: c_uint,
LineEnd: c_uint,
ColumnEnd: c_uint,
);
#[allow(improper_ctypes)]
pub fn LLVMRustCoverageWriteFilenamesSectionToBuffer(
Filenames: *const *const c_char,
FilenamesLen: size_t,
BufferOut: &RustString,
);
#[allow(improper_ctypes)]
pub fn LLVMRustCoverageWriteMappingToBuffer(
VirtualFileMappingIDs: *const c_uint,
NumVirtualFileMappingIDs: c_uint,
Expressions: *const SmallVectorCounterExpression<'_>,
MappingRegions: *const SmallVectorCounterMappingRegion<'_>,
BufferOut: &RustString,
);
pub fn LLVMRustCoverageComputeHash(Name: *const c_char) -> u64;
#[allow(improper_ctypes)]
pub fn LLVMRustCoverageWriteSectionNameToString(M: &Module, Str: &RustString);
#[allow(improper_ctypes)]
pub fn LLVMRustCoverageWriteMappingVarNameToString(Str: &RustString);
pub fn LLVMRustCoverageMappingVersion() -> u32;
pub fn LLVMRustDebugMetadataVersion() -> u32;
pub fn LLVMRustVersionMajor() -> u32;
pub fn LLVMRustVersionMinor() -> u32;

View file

@ -12,7 +12,7 @@ use libc::c_uint;
use rustc_data_structures::small_c_str::SmallCStr;
use rustc_llvm::RustString;
use std::cell::RefCell;
use std::ffi::CStr;
use std::ffi::{CStr, CString};
use std::str::FromStr;
use std::string::FromUtf8Error;
@ -189,6 +189,42 @@ pub fn mk_section_iter(llof: &ffi::ObjectFile) -> SectionIter<'_> {
unsafe { SectionIter { llsi: LLVMGetSections(llof) } }
}
pub fn set_section(llglobal: &Value, section_name: &str) {
let section_name_cstr = CString::new(section_name).expect("unexpected CString error");
unsafe {
LLVMSetSection(llglobal, section_name_cstr.as_ptr());
}
}
pub fn add_global<'a>(llmod: &'a Module, ty: &'a Type, name: &str) -> &'a Value {
let name_cstr = CString::new(name).expect("unexpected CString error");
unsafe { LLVMAddGlobal(llmod, ty, name_cstr.as_ptr()) }
}
pub fn set_initializer(llglobal: &Value, constant_val: &Value) {
unsafe {
LLVMSetInitializer(llglobal, constant_val);
}
}
pub fn set_global_constant(llglobal: &Value, is_constant: bool) {
unsafe {
LLVMSetGlobalConstant(llglobal, if is_constant { ffi::True } else { ffi::False });
}
}
pub fn set_linkage(llglobal: &Value, linkage: Linkage) {
unsafe {
LLVMRustSetLinkage(llglobal, linkage);
}
}
pub fn set_alignment(llglobal: &Value, bytes: usize) {
unsafe {
ffi::LLVMSetAlignment(llglobal, bytes as c_uint);
}
}
/// Safe wrapper around `LLVMGetParam`, because segfaults are no fun.
pub fn get_param(llfn: &Value, index: c_uint) -> &Value {
unsafe {
@ -225,6 +261,12 @@ pub fn build_string(f: impl FnOnce(&RustString)) -> Result<String, FromUtf8Error
String::from_utf8(sr.bytes.into_inner())
}
pub fn build_byte_buffer(f: impl FnOnce(&RustString)) -> Vec<u8> {
let sr = RustString { bytes: RefCell::new(Vec::new()) };
f(&sr);
sr.bytes.into_inner()
}
pub fn twine_to_string(tr: &Twine) -> String {
unsafe {
build_string(|s| LLVMRustWriteTwineToString(tr, s)).expect("got a non-UTF8 Twine from LLVM")

View file

@ -1659,7 +1659,7 @@ fn linker_with_args<'a, B: ArchiveBuilder<'a>>(
// FIXME: Order dependent, applies to the following objects. Where should it be placed?
// Try to strip as much out of the generated object by removing unused
// sections if possible. See more comments in linker.rs
if !sess.opts.cg.link_dead_code {
if sess.opts.cg.link_dead_code != Some(true) {
let keep_metadata = crate_type == CrateType::Dylib;
cmd.gc_sections(keep_metadata);
}
@ -1695,7 +1695,7 @@ fn linker_with_args<'a, B: ArchiveBuilder<'a>>(
);
// OBJECT-FILES-NO, AUDIT-ORDER
if sess.opts.cg.profile_generate.enabled() {
if sess.opts.cg.profile_generate.enabled() || sess.opts.debugging_opts.instrument_coverage {
cmd.pgo_gen();
}

View file

@ -28,9 +28,7 @@ use rustc_target::spec::{LinkOutputKind, LinkerFlavor, LldFlavor};
pub fn disable_localization(linker: &mut Command) {
// No harm in setting both env vars simultaneously.
// Unix-style linkers.
// We use a UTF-8 locale, as the generic C locale disables support for non-ASCII
// bytes in filenames on some platforms.
linker.env("LC_ALL", "en_US.UTF-8");
linker.env("LC_ALL", "C");
// MSVC's `link.exe`.
linker.env("VSLANG", "1033");
}

View file

@ -203,6 +203,17 @@ fn exported_symbols_provider_local(
}));
}
if tcx.sess.opts.debugging_opts.instrument_coverage {
// Similar to PGO profiling, preserve symbols used by LLVM InstrProf coverage profiling.
const COVERAGE_WEAK_SYMBOLS: [&str; 3] =
["__llvm_profile_filename", "__llvm_coverage_mapping", "__llvm_covmap"];
symbols.extend(COVERAGE_WEAK_SYMBOLS.iter().map(|sym| {
let exported_symbol = ExportedSymbol::NoDefId(SymbolName::new(tcx, sym));
(exported_symbol, SymbolExportLevel::C)
}));
}
if tcx.sess.opts.debugging_opts.sanitizer.contains(SanitizerSet::MEMORY) {
// Similar to profiling, preserve weak msan symbol during LTO.
const MSAN_WEAK_SYMBOLS: [&str; 2] = ["__msan_track_origins", "__msan_keep_going"];

View file

@ -1,32 +1,154 @@
use rustc_data_structures::fx::FxHashMap;
use std::collections::hash_map;
use std::slice;
use rustc_data_structures::sync::Lrc;
use rustc_middle::mir;
use rustc_span::source_map::{Pos, SourceFile, SourceMap};
use rustc_span::{BytePos, FileName, RealFileName};
use std::cmp::{Ord, Ordering};
use std::collections::BTreeMap;
use std::fmt;
use std::path::PathBuf;
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub enum CounterOp {
Add,
// Note the order (and therefore the default discriminant values) is important. With
// `#[repr(C)]`, this enum matches the layout of LLVM's nested enum
// `llvm::coverage::CounterExpression::ExprKind`, as shown in the following source snippet:
// https://github.com/rust-lang/llvm-project/blob/f208b70fbc4dee78067b3c5bd6cb92aa3ba58a1e/llvm/include/llvm/ProfileData/Coverage/CoverageMapping.h#L146
Subtract,
Add,
}
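A compile-time guard on that ordering could look like the following (an illustrative sketch using today's `const` assertions, not part of this change); it simply pins the `#[repr(C)]` discriminants to the values LLVM expects:
// Illustrative only: Subtract must map to 0 and Add to 1, matching
// llvm::coverage::CounterExpression::ExprKind.
const _: () = assert!(CounterOp::Subtract as u32 == 0 && CounterOp::Add as u32 == 1);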
#[derive(Copy, Clone, Debug)]
pub enum CoverageKind {
Counter,
CounterExpression(u32, CounterOp, u32),
Unreachable,
}
pub struct CoverageSpan {
#[derive(Clone, Debug)]
pub struct CoverageRegion {
pub kind: CoverageKind,
pub start_byte_pos: u32,
pub end_byte_pos: u32,
}
pub struct CoverageRegion {
pub kind: CoverageKind,
pub coverage_span: CoverageSpan,
impl CoverageRegion {
pub fn source_loc(&self, source_map: &SourceMap) -> Option<(Lrc<SourceFile>, CoverageLoc)> {
let (start_file, start_line, start_col) =
lookup_file_line_col(source_map, BytePos::from_u32(self.start_byte_pos));
let (end_file, end_line, end_col) =
lookup_file_line_col(source_map, BytePos::from_u32(self.end_byte_pos));
let start_file_path = match &start_file.name {
FileName::Real(RealFileName::Named(path)) => path,
_ => {
bug!("start_file_path should be a RealFileName, but it was: {:?}", start_file.name)
}
};
let end_file_path = match &end_file.name {
FileName::Real(RealFileName::Named(path)) => path,
_ => bug!("end_file_path should be a RealFileName, but it was: {:?}", end_file.name),
};
if start_file_path == end_file_path {
Some((start_file, CoverageLoc { start_line, start_col, end_line, end_col }))
} else {
None
// FIXME(richkadel): There seems to be a problem computing the file location in
// some cases. I need to investigate this more. When I generate and show coverage
// for the example binary in the crates.io crate `json5format`, I had a couple of
// notable problems:
//
// 1. I saw a lot of coverage spans in `llvm-cov show` highlighting regions in
// various comments (not corresponding to rustdoc code), indicating a possible
// problem with the byte_pos-to-source-map implementation.
//
// 2. And (perhaps not related) when I build the aforementioned example binary with:
// `RUSTFLAGS="-Zinstrument-coverage" cargo build --example formatjson5`
// and then run that binary with
// `LLVM_PROFILE_FILE="formatjson5.profraw" ./target/debug/examples/formatjson5 \
// some.json5` for some reason the binary generates *TWO* `.profraw` files. One
// named `default.profraw` and the other named `formatjson5.profraw` (the expected
// name, in this case).
//
// If the byte range conversion is wrong, fix it. But if it
// is right, then it is possible for the start and end to be in different files.
// Can I do something other than ignore coverages that span multiple files?
//
// If I can resolve this, remove the `Option<>` result type wrapper and update
// `regions_in_file_order()` accordingly.
}
}
}
impl Default for CoverageRegion {
fn default() -> Self {
Self {
// The default kind (Unreachable) is a placeholder that will be overwritten before
// backend codegen.
kind: CoverageKind::Unreachable,
start_byte_pos: 0,
end_byte_pos: 0,
}
}
}
/// A source code region used with coverage information.
#[derive(Debug, Eq, PartialEq)]
pub struct CoverageLoc {
/// The (1-based) line number of the region start.
pub start_line: u32,
/// The (1-based) column number of the region start.
pub start_col: u32,
/// The (1-based) line number of the region end.
pub end_line: u32,
/// The (1-based) column number of the region end.
pub end_col: u32,
}
impl Ord for CoverageLoc {
fn cmp(&self, other: &Self) -> Ordering {
(self.start_line, &self.start_col, &self.end_line, &self.end_col).cmp(&(
other.start_line,
&other.start_col,
&other.end_line,
&other.end_col,
))
}
}
impl PartialOrd for CoverageLoc {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl fmt::Display for CoverageLoc {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Customize debug format, and repeat the file name, so generated location strings are
// "clickable" in many IDEs.
write!(f, "{}:{} - {}:{}", self.start_line, self.start_col, self.end_line, self.end_col)
}
}
fn lookup_file_line_col(source_map: &SourceMap, byte_pos: BytePos) -> (Lrc<SourceFile>, u32, u32) {
let found = source_map
.lookup_line(byte_pos)
.expect("should find coverage region byte position in source");
let file = found.sf;
let line_pos = file.line_begin_pos(byte_pos);
// Use 1-based indexing.
let line = (found.line + 1) as u32;
let col = (byte_pos - line_pos).to_u32() + 1;
(file, line, col)
}
/// Collects all of the coverage regions associated with (a) injected counters, (b) counter
/// expressions (additions or subtractions), and (c) unreachable regions (always counted as zero),
/// for a given Function. Counters and counter expressions are indexed because they can be operands
/// in an expression.
/// in an expression. This struct also stores the `function_source_hash`, computed during
/// instrumentation and forwarded with counters.
///
/// Note, it's important to distinguish the `unreachable` region type from what LLVM refers to as
/// a "gap region" (or "gap area"). A gap region is a code region within a counted region (either
@ -34,50 +156,134 @@ pub struct CoverageRegion {
/// lines with only whitespace or comments). According to LLVM Code Coverage Mapping documentation,
/// "A count for a gap area is only used as the line execution count if there are no other regions
/// on a line."
#[derive(Default)]
pub struct FunctionCoverageRegions {
indexed: FxHashMap<u32, CoverageRegion>,
unreachable: Vec<CoverageSpan>,
pub struct FunctionCoverage {
source_hash: u64,
counters: Vec<CoverageRegion>,
expressions: Vec<CoverageRegion>,
unreachable: Vec<CoverageRegion>,
translated: bool,
}
impl FunctionCoverageRegions {
pub fn add_counter(&mut self, index: u32, start_byte_pos: u32, end_byte_pos: u32) {
self.indexed.insert(
index,
CoverageRegion {
kind: CoverageKind::Counter,
coverage_span: CoverageSpan { start_byte_pos, end_byte_pos },
},
);
impl FunctionCoverage {
pub fn with_coverageinfo<'tcx>(coverageinfo: &'tcx mir::CoverageInfo) -> Self {
Self {
source_hash: 0, // will be set with the first `add_counter()`
counters: vec![CoverageRegion::default(); coverageinfo.num_counters as usize],
expressions: vec![CoverageRegion::default(); coverageinfo.num_expressions as usize],
unreachable: Vec::new(),
translated: false,
}
}
/// Adds a code region to be counted by an injected counter intrinsic. The region is stored at
/// the given counter `index`, and the function's `source_hash` is recorded.
pub fn add_counter(
&mut self,
source_hash: u64,
index: u32,
start_byte_pos: u32,
end_byte_pos: u32,
) {
self.source_hash = source_hash;
self.counters[index as usize] =
CoverageRegion { kind: CoverageKind::Counter, start_byte_pos, end_byte_pos };
}
pub fn add_counter_expression(
&mut self,
index: u32,
translated_index: u32,
lhs: u32,
op: CounterOp,
rhs: u32,
start_byte_pos: u32,
end_byte_pos: u32,
) {
self.indexed.insert(
index,
CoverageRegion {
kind: CoverageKind::CounterExpression(lhs, op, rhs),
coverage_span: CoverageSpan { start_byte_pos, end_byte_pos },
},
);
let index = u32::MAX - translated_index;
// Counter expressions start with "translated indexes", descending from `u32::MAX`, so
// the range of expression indexes is disjoint from the range of counter indexes. This way,
// both counters and expressions can be operands in other expressions.
//
// Once all counters have been added, the final "region index" for an expression is
// `counters.len() + expression_index` (where `expression_index` is its index in
// `self.expressions`), and the expression operands (`lhs` and `rhs`) can be converted to
// final "region index" references by the same conversion, after subtracting from
// `u32::MAX`.
self.expressions[index as usize] = CoverageRegion {
kind: CoverageKind::CounterExpression(lhs, op, rhs),
start_byte_pos,
end_byte_pos,
};
}
pub fn add_unreachable(&mut self, start_byte_pos: u32, end_byte_pos: u32) {
self.unreachable.push(CoverageSpan { start_byte_pos, end_byte_pos });
self.unreachable.push(CoverageRegion {
kind: CoverageKind::Unreachable,
start_byte_pos,
end_byte_pos,
});
}
pub fn indexed_regions(&self) -> hash_map::Iter<'_, u32, CoverageRegion> {
self.indexed.iter()
pub fn source_hash(&self) -> u64 {
self.source_hash
}
pub fn unreachable_regions(&self) -> slice::Iter<'_, CoverageSpan> {
self.unreachable.iter()
fn regions(&'a mut self) -> impl Iterator<Item = &'a CoverageRegion> {
assert!(self.source_hash != 0);
self.ensure_expressions_translated();
self.counters.iter().chain(self.expressions.iter().chain(self.unreachable.iter()))
}
pub fn regions_in_file_order(
&'a mut self,
source_map: &SourceMap,
) -> BTreeMap<PathBuf, BTreeMap<CoverageLoc, (usize, CoverageKind)>> {
let mut regions_in_file_order = BTreeMap::new();
for (region_id, region) in self.regions().enumerate() {
if let Some((source_file, region_loc)) = region.source_loc(source_map) {
// FIXME(richkadel): `region.source_loc()` sometimes fails with two different
// filenames for the start and end byte position. This seems wrong, but for
// now, if encountered, the region is skipped. If resolved, convert the result
// to a non-option value so regions are never skipped.
let real_file_path = match &(*source_file).name {
FileName::Real(RealFileName::Named(path)) => path.clone(),
_ => bug!("coverage mapping expected only real, named files"),
};
let file_coverage_regions =
regions_in_file_order.entry(real_file_path).or_insert_with(|| BTreeMap::new());
file_coverage_regions.insert(region_loc, (region_id, region.kind));
}
}
regions_in_file_order
}
/// A one-time translation of expression operands is needed, for any operands referencing
/// other CounterExpressions. CounterExpression operands get an initial operand ID that is
/// computed by the simple translation: `u32::MAX - expression_index` because, when created,
/// the total number of Counters is not yet known. This function recomputes region indexes
/// for expressions so they start with the next region index after the last counter index.
fn ensure_expressions_translated(&mut self) {
if !self.translated {
self.translated = true;
let start = self.counters.len() as u32;
assert!(
(start as u64 + self.expressions.len() as u64) < u32::MAX as u64,
"the number of counters and counter expressions in a single function exceeds {}",
u32::MAX
);
for region in self.expressions.iter_mut() {
match region.kind {
CoverageKind::CounterExpression(lhs, op, rhs) => {
let lhs = to_region_index(start, lhs);
let rhs = to_region_index(start, rhs);
region.kind = CoverageKind::CounterExpression(lhs, op, rhs);
}
_ => bug!("expressions must only contain CounterExpression kinds"),
}
}
}
}
}
fn to_region_index(start: u32, index: u32) -> u32 {
if index < start { index } else { start + (u32::MAX - index) }
}
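A worked example of the two-step index translation (the values are invented): with 4 counters and 2 expressions, `add_counter_expression` stores expression 0 under the translated index `u32::MAX` and expression 1 under `u32::MAX - 1`; once the counter count is known, `to_region_index` maps those operands to region indexes 4 and 5, while plain counter operands pass through unchanged:
// Illustrative only: `start` is the number of counters in the function.
let start = 4u32;
assert_eq!(to_region_index(start, u32::MAX), 4); // expression 0 -> region index 4
assert_eq!(to_region_index(start, u32::MAX - 1), 5); // expression 1 -> region index 5
assert_eq!(to_region_index(start, 2), 2); // counter operands are left as-is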

View file

@ -10,6 +10,7 @@ pub trait CoverageInfoBuilderMethods<'tcx>: BackendTypes {
fn add_counter_region(
&mut self,
instance: Instance<'tcx>,
function_source_hash: u64,
index: u32,
start_byte_pos: u32,
end_byte_pos: u32,

View file

@ -5,6 +5,18 @@ use rustc_target::abi::Align;
pub trait StaticMethods: BackendTypes {
fn static_addr_of(&self, cv: Self::Value, align: Align, kind: Option<&str>) -> Self::Value;
fn codegen_static(&self, def_id: DefId, is_mutable: bool);
/// Mark the given global value as "used", to prevent a backend from potentially removing a
/// static variable that may otherwise appear unused.
///
/// Static variables in Rust can be annotated with the `#[used]` attribute to direct the `rustc`
/// compiler to mark the variable as a "used global".
///
/// ```no_run
/// #[used]
/// static FOO: u32 = 0;
/// ```
fn add_used_global(&self, global: Self::Value);
}
pub trait StaticBuilderMethods: BackendTypes {

View file

@ -554,7 +554,7 @@ E0770: include_str!("./error_codes/E0770.md"),
// E0420, merged into 532
// E0421, merged into 531
// E0427, merged into 530
E0456, // plugin `..` is not available for triple `..`
// E0456, // plugin `..` is not available for triple `..`
E0457, // plugin `..` only found in rlib format, but must be available...
E0460, // found possibly newer version of crate `..`
E0461, // couldn't find crate `..` with expected target triple ..

View file

@ -0,0 +1,37 @@
//! Validity checking for fake lang items
use crate::def_id::DefId;
use crate::{lang_items, LangItem, LanguageItems};
use rustc_data_structures::fx::FxHashMap;
use rustc_span::symbol::{sym, Symbol};
use lazy_static::lazy_static;
macro_rules! fake_lang_items {
($($item:ident, $name:ident, $method:ident;)*) => (
lazy_static! {
pub static ref FAKE_ITEMS_REFS: FxHashMap<Symbol, LangItem> = {
let mut map = FxHashMap::default();
$(map.insert(sym::$name, lang_items::$item);)*
map
};
}
impl LanguageItems {
pub fn is_fake_lang_item(&self, item_def_id: DefId) -> bool {
let did = Some(item_def_id);
$(self.$method() == did)||*
}
}
) }
fake_lang_items! {
// Variant name, Symbol, Method name,
CountCodeRegionFnLangItem, count_code_region, count_code_region_fn;
CoverageCounterAddFnLangItem, coverage_counter_add, coverage_counter_add_fn;
CoverageCounterSubtractFnLangItem, coverage_counter_subtract, coverage_counter_subtract_fn;
}
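A rough sketch of how the generated items might be consumed (the call sites below are hypothetical, not from this change):
// Illustrative only: look up a fake lang item by symbol...
if let Some(lang_item) = FAKE_ITEMS_REFS.get(&sym::count_code_region) {
    // `*lang_item` is `lang_items::CountCodeRegionFnLangItem` here.
    let _ = lang_item;
}
// ...or, given `items: &LanguageItems` and a `def_id: DefId`, ask whether the
// DefId belongs to one of the fake lang items:
// let is_fake = items.is_fake_lang_item(def_id);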

View file

@ -276,8 +276,6 @@ language_item_table! {
StartFnLangItem, sym::start, start_fn, Target::Fn;
CountCodeRegionFnLangItem, sym::count_code_region, count_code_region_fn, Target::Fn;
EhPersonalityLangItem, sym::eh_personality, eh_personality, Target::Fn;
EhCatchTypeinfoLangItem, sym::eh_catch_typeinfo, eh_catch_typeinfo, Target::Static;
@ -295,4 +293,9 @@ language_item_table! {
TerminationTraitLangItem, sym::termination, termination, Target::Trait;
TryTraitLangItem, kw::Try, try_trait, Target::Trait;
// language items related to source code coverage instrumentation (-Zinstrument-coverage)
CountCodeRegionFnLangItem, sym::count_code_region, count_code_region_fn, Target::Fn;
CoverageCounterAddFnLangItem, sym::coverage_counter_add, coverage_counter_add_fn, Target::Fn;
CoverageCounterSubtractFnLangItem, sym::coverage_counter_subtract, coverage_counter_subtract_fn, Target::Fn;
}

View file

@ -17,6 +17,7 @@ mod arena;
pub mod def;
pub mod definitions;
pub use rustc_span::def_id;
pub mod fake_lang_items;
mod hir;
pub mod hir_id;
pub mod intravisit;

View file

@ -401,7 +401,7 @@ fn test_codegen_options_tracking_hash() {
untracked!(incremental, Some(String::from("abc")));
// `link_arg` is omitted because it just forwards to `link_args`.
untracked!(link_args, vec![String::from("abc"), String::from("def")]);
untracked!(link_dead_code, true);
untracked!(link_dead_code, Some(true));
untracked!(linker, Some(PathBuf::from("linker")));
untracked!(linker_flavor, Some(LinkerFlavor::Gcc));
untracked!(no_stack_check, true);

View file

@ -104,8 +104,16 @@ fn main() {
optional_components.push("riscv");
}
let required_components =
&["ipo", "bitreader", "bitwriter", "linker", "asmparser", "lto", "instrumentation"];
let required_components = &[
"ipo",
"bitreader",
"bitwriter",
"linker",
"asmparser",
"lto",
"coverage",
"instrumentation",
];
let components = output(Command::new(&llvm_config).arg("--components"));
let mut components = components.split_whitespace().collect::<Vec<_>>();
@ -169,6 +177,7 @@ fn main() {
cfg.file("../rustllvm/PassWrapper.cpp")
.file("../rustllvm/RustWrapper.cpp")
.file("../rustllvm/ArchiveWrapper.cpp")
.file("../rustllvm/CoverageMappingWrapper.cpp")
.file("../rustllvm/Linker.cpp")
.cpp(true)
.cpp_link_stdlib(None) // we handle this below

View file

@ -13,6 +13,12 @@ pub struct RustString {
pub bytes: RefCell<Vec<u8>>,
}
impl RustString {
pub fn len(&self) -> usize {
self.bytes.borrow().len()
}
}
/// Appending to a Rust string -- used by RawRustStringOstream.
#[no_mangle]
#[allow(improper_ctypes_definitions)]

View file

@ -1,6 +1,7 @@
//! Validates all used crates and extern libraries and loads their metadata
use crate::locator::{CrateLocator, CratePaths};
use crate::dynamic_lib::DynamicLibrary;
use crate::locator::{CrateError, CrateLocator, CratePaths};
use crate::rmeta::{CrateDep, CrateMetadata, CrateNumMap, CrateRoot, MetadataBlob};
use rustc_ast::expand::allocator::{global_allocator_spans, AllocatorKind};
@ -8,15 +9,12 @@ use rustc_ast::{ast, attr};
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::svh::Svh;
use rustc_data_structures::sync::Lrc;
use rustc_errors::struct_span_err;
use rustc_expand::base::SyntaxExtension;
use rustc_hir::def_id::{CrateNum, LocalDefId, LOCAL_CRATE};
use rustc_hir::definitions::Definitions;
use rustc_index::vec::IndexVec;
use rustc_middle::middle::cstore::DepKind;
use rustc_middle::middle::cstore::{
CrateSource, ExternCrate, ExternCrateSource, MetadataLoaderDyn,
};
use rustc_middle::middle::cstore::{CrateSource, DepKind, ExternCrate};
use rustc_middle::middle::cstore::{ExternCrateSource, MetadataLoaderDyn};
use rustc_middle::ty::TyCtxt;
use rustc_session::config::{self, CrateType, ExternLocation};
use rustc_session::lint;
@ -31,7 +29,7 @@ use rustc_target::spec::{PanicStrategy, TargetTriple};
use log::{debug, info, log_enabled};
use proc_macro::bridge::client::ProcMacro;
use std::path::Path;
use std::{cmp, fs};
use std::{cmp, env, fs};
#[derive(Clone)]
pub struct CStore {
@ -69,18 +67,6 @@ enum LoadResult {
Loaded(Library),
}
enum LoadError<'a> {
LocatorError(CrateLocator<'a>),
}
impl<'a> LoadError<'a> {
fn report(self) -> ! {
match self {
LoadError::LocatorError(locator) => locator.report_errs(),
}
}
}
/// A reference to `CrateMetadata` that can also give access to whole crate store when necessary.
#[derive(Clone, Copy)]
crate struct CrateMetadataRef<'a> {
@ -280,60 +266,43 @@ impl<'a> CrateLoader<'a> {
ret
}
fn verify_no_symbol_conflicts(&self, span: Span, root: &CrateRoot<'_>) {
fn verify_no_symbol_conflicts(&self, root: &CrateRoot<'_>) -> Result<(), CrateError> {
// Check for (potential) conflicts with the local crate
if self.local_crate_name == root.name()
&& self.sess.local_crate_disambiguator() == root.disambiguator()
{
struct_span_err!(
self.sess,
span,
E0519,
"the current crate is indistinguishable from one of its \
dependencies: it has the same crate-name `{}` and was \
compiled with the same `-C metadata` arguments. This \
will result in symbol conflicts between the two.",
root.name()
)
.emit()
return Err(CrateError::SymbolConflictsCurrent(root.name()));
}
// Check for conflicts with any crate loaded so far
let mut res = Ok(());
self.cstore.iter_crate_data(|_, other| {
if other.name() == root.name() && // same crate-name
other.disambiguator() == root.disambiguator() && // same crate-disambiguator
other.disambiguator() == root.disambiguator() && // same crate-disambiguator
other.hash() != root.hash()
{
// but different SVH
struct_span_err!(
self.sess,
span,
E0523,
"found two different crates with name `{}` that are \
not distinguished by differing `-C metadata`. This \
will result in symbol conflicts between the two.",
root.name()
)
.emit();
res = Err(CrateError::SymbolConflictsOthers(root.name()));
}
});
res
}
fn register_crate(
&mut self,
host_lib: Option<Library>,
root: Option<&CratePaths>,
span: Span,
lib: Library,
dep_kind: DepKind,
name: Symbol,
) -> CrateNum {
) -> Result<CrateNum, CrateError> {
let _prof_timer = self.sess.prof.generic_activity("metadata_register_crate");
let Library { source, metadata } = lib;
let crate_root = metadata.get_root();
let host_hash = host_lib.as_ref().map(|lib| lib.metadata.get_root().hash());
self.verify_no_symbol_conflicts(span, &crate_root);
self.verify_no_symbol_conflicts(&crate_root)?;
let private_dep =
self.sess.opts.externs.get(&name.as_str()).map(|e| e.is_private_dep).unwrap_or(false);
@ -353,7 +322,7 @@ impl<'a> CrateLoader<'a> {
&crate_paths
};
let cnum_map = self.resolve_crate_deps(root, &crate_root, &metadata, cnum, span, dep_kind);
let cnum_map = self.resolve_crate_deps(root, &crate_root, &metadata, cnum, dep_kind)?;
let raw_proc_macros = if crate_root.is_proc_macro_crate() {
let temp_root;
@ -365,7 +334,7 @@ impl<'a> CrateLoader<'a> {
None => (&source, &crate_root),
};
let dlsym_dylib = dlsym_source.dylib.as_ref().expect("no dylib for a proc-macro crate");
Some(self.dlsym_proc_macros(&dlsym_dylib.0, dlsym_root.disambiguator(), span))
Some(self.dlsym_proc_macros(&dlsym_dylib.0, dlsym_root.disambiguator())?)
} else {
None
};
@ -386,14 +355,14 @@ impl<'a> CrateLoader<'a> {
),
);
cnum
Ok(cnum)
}
fn load_proc_macro<'b>(
&self,
locator: &mut CrateLocator<'b>,
path_kind: PathKind,
) -> Option<(LoadResult, Option<Library>)>
) -> Result<Option<(LoadResult, Option<Library>)>, CrateError>
where
'a: 'b,
{
@ -408,8 +377,11 @@ impl<'a> CrateLoader<'a> {
let (locator, target_result) = if self.sess.opts.debugging_opts.dual_proc_macros {
proc_macro_locator.reset();
let result = match self.load(&mut proc_macro_locator)? {
LoadResult::Previous(cnum) => return Some((LoadResult::Previous(cnum), None)),
LoadResult::Loaded(library) => Some(LoadResult::Loaded(library)),
Some(LoadResult::Previous(cnum)) => {
return Ok(Some((LoadResult::Previous(cnum), None)));
}
Some(LoadResult::Loaded(library)) => Some(LoadResult::Loaded(library)),
None => return Ok(None),
};
locator.hash = locator.host_hash;
// Use the locator when looking for the host proc macro crate, as that is required
@ -427,9 +399,12 @@ impl<'a> CrateLoader<'a> {
locator.triple = TargetTriple::from_triple(config::host_triple());
locator.filesearch = self.sess.host_filesearch(path_kind);
let host_result = self.load(locator)?;
let host_result = match self.load(locator)? {
Some(host_result) => host_result,
None => return Ok(None),
};
Some(if self.sess.opts.debugging_opts.dual_proc_macros {
Ok(Some(if self.sess.opts.debugging_opts.dual_proc_macros {
let host_result = match host_result {
LoadResult::Previous(..) => {
panic!("host and target proc macros must be loaded in lock-step")
@ -439,7 +414,7 @@ impl<'a> CrateLoader<'a> {
(target_result.unwrap(), Some(host_result))
} else {
(host_result, None)
})
}))
}
fn resolve_crate<'b>(
@ -452,25 +427,20 @@ impl<'a> CrateLoader<'a> {
if dep.is_none() {
self.used_extern_options.insert(name);
}
if !name.as_str().is_ascii() {
self.sess
.struct_span_err(
span,
&format!("cannot load a crate with a non-ascii name `{}`", name,),
)
.emit();
}
self.maybe_resolve_crate(name, span, dep_kind, dep).unwrap_or_else(|err| err.report())
self.maybe_resolve_crate(name, dep_kind, dep)
.unwrap_or_else(|err| err.report(self.sess, span))
}
fn maybe_resolve_crate<'b>(
&'b mut self,
name: Symbol,
span: Span,
mut dep_kind: DepKind,
dep: Option<(&'b CratePaths, &'b CrateDep)>,
) -> Result<CrateNum, LoadError<'b>> {
) -> Result<CrateNum, CrateError> {
info!("resolving crate `{}`", name);
if !name.as_str().is_ascii() {
return Err(CrateError::NonAsciiName(name));
}
let (root, hash, host_hash, extra_filename, path_kind) = match dep {
Some((root, dep)) => (
Some(root),
@ -494,18 +464,20 @@ impl<'a> CrateLoader<'a> {
extra_filename,
false, // is_host
path_kind,
span,
root,
Some(false), // is_proc_macro
);
self.load(&mut locator)
.map(|r| (r, None))
.or_else(|| {
match self.load(&mut locator)? {
Some(res) => (res, None),
None => {
dep_kind = DepKind::MacrosOnly;
self.load_proc_macro(&mut locator, path_kind)
})
.ok_or_else(move || LoadError::LocatorError(locator))?
match self.load_proc_macro(&mut locator, path_kind)? {
Some(res) => res,
None => return Err(locator.into_error()),
}
}
}
};
match result {
@ -518,14 +490,17 @@ impl<'a> CrateLoader<'a> {
Ok(cnum)
}
(LoadResult::Loaded(library), host_library) => {
Ok(self.register_crate(host_library, root, span, library, dep_kind, name))
self.register_crate(host_library, root, library, dep_kind, name)
}
_ => panic!(),
}
}
fn load(&self, locator: &mut CrateLocator<'_>) -> Option<LoadResult> {
let library = locator.maybe_load_library_crate()?;
fn load(&self, locator: &mut CrateLocator<'_>) -> Result<Option<LoadResult>, CrateError> {
let library = match locator.maybe_load_library_crate()? {
Some(library) => library,
None => return Ok(None),
};
// In the case that we're loading a crate, but not matching
// against a hash, we could load a crate which has the same hash
@ -536,7 +511,7 @@ impl<'a> CrateLoader<'a> {
// don't want to match a host crate against an equivalent target one
// already loaded.
let root = library.metadata.get_root();
if locator.triple == self.sess.opts.target_triple {
Ok(Some(if locator.triple == self.sess.opts.target_triple {
let mut result = LoadResult::Loaded(library);
self.cstore.iter_crate_data(|cnum, data| {
if data.name() == root.name() && root.hash() == data.hash() {
@ -545,10 +520,10 @@ impl<'a> CrateLoader<'a> {
result = LoadResult::Previous(cnum);
}
});
Some(result)
result
} else {
Some(LoadResult::Loaded(library))
}
LoadResult::Loaded(library)
}))
}
fn update_extern_crate(&self, cnum: CrateNum, extern_crate: ExternCrate) {
@ -569,53 +544,51 @@ impl<'a> CrateLoader<'a> {
crate_root: &CrateRoot<'_>,
metadata: &MetadataBlob,
krate: CrateNum,
span: Span,
dep_kind: DepKind,
) -> CrateNumMap {
) -> Result<CrateNumMap, CrateError> {
debug!("resolving deps of external crate");
if crate_root.is_proc_macro_crate() {
return CrateNumMap::new();
return Ok(CrateNumMap::new());
}
// The map from crate numbers in the crate we're resolving to local crate numbers.
// We map 0 and all other holes in the map to our parent crate. The "additional"
// self-dependencies should be harmless.
std::iter::once(krate)
.chain(crate_root.decode_crate_deps(metadata).map(|dep| {
info!(
"resolving dep crate {} hash: `{}` extra filename: `{}`",
dep.name, dep.hash, dep.extra_filename
);
let dep_kind = match dep_kind {
DepKind::MacrosOnly => DepKind::MacrosOnly,
_ => dep.kind,
};
self.resolve_crate(dep.name, span, dep_kind, Some((root, &dep)))
}))
.collect()
let deps = crate_root.decode_crate_deps(metadata);
let mut crate_num_map = CrateNumMap::with_capacity(1 + deps.len());
crate_num_map.push(krate);
for dep in deps {
info!(
"resolving dep crate {} hash: `{}` extra filename: `{}`",
dep.name, dep.hash, dep.extra_filename
);
let dep_kind = match dep_kind {
DepKind::MacrosOnly => DepKind::MacrosOnly,
_ => dep.kind,
};
let cnum = self.maybe_resolve_crate(dep.name, dep_kind, Some((root, &dep)))?;
crate_num_map.push(cnum);
}
Ok(crate_num_map)
}
fn dlsym_proc_macros(
&self,
path: &Path,
disambiguator: CrateDisambiguator,
span: Span,
) -> &'static [ProcMacro] {
use crate::dynamic_lib::DynamicLibrary;
use std::env;
) -> Result<&'static [ProcMacro], CrateError> {
// Make sure the path contains a / or the linker will search for it.
let path = env::current_dir().unwrap().join(path);
let lib = match DynamicLibrary::open(&path) {
Ok(lib) => lib,
Err(err) => self.sess.span_fatal(span, &err),
Err(s) => return Err(CrateError::DlOpen(s)),
};
let sym = self.sess.generate_proc_macro_decls_symbol(disambiguator);
let decls = unsafe {
let sym = match lib.symbol(&sym) {
Ok(f) => f,
Err(err) => self.sess.span_fatal(span, &err),
Err(s) => return Err(CrateError::DlSym(s)),
};
*(sym as *const &[ProcMacro])
};
@ -624,7 +597,7 @@ impl<'a> CrateLoader<'a> {
// since the library can make things that will live arbitrarily long.
std::mem::forget(lib);
decls
Ok(decls)
}
fn inject_panic_runtime(&mut self, krate: &ast::Crate) {
@ -952,7 +925,7 @@ impl<'a> CrateLoader<'a> {
cnum
}
pub fn maybe_process_path_extern(&mut self, name: Symbol, span: Span) -> Option<CrateNum> {
self.maybe_resolve_crate(name, span, DepKind::Explicit, None).ok()
pub fn maybe_process_path_extern(&mut self, name: Symbol) -> Option<CrateNum> {
self.maybe_resolve_crate(name, DepKind::Explicit, None).ok()
}
}

View file

@ -216,9 +216,10 @@ use crate::creader::Library;
use crate::rmeta::{rustc_version, MetadataBlob, METADATA_HEADER};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::owning_ref::OwningRef;
use rustc_data_structures::svh::Svh;
use rustc_data_structures::sync::MetadataRef;
use rustc_errors::{struct_span_err, DiagnosticBuilder};
use rustc_errors::struct_span_err;
use rustc_middle::middle::cstore::{CrateSource, MetadataLoader};
use rustc_session::config::{self, CrateType};
use rustc_session::filesearch::{FileDoesntMatch, FileMatches, FileSearch};
@ -228,25 +229,12 @@ use rustc_span::symbol::{sym, Symbol};
use rustc_span::Span;
use rustc_target::spec::{Target, TargetTriple};
use std::cmp;
use std::fmt;
use std::fs;
use std::io::{self, Read};
use flate2::read::DeflateDecoder;
use log::{debug, info, warn};
use std::io::{Read, Result as IoResult, Write};
use std::ops::Deref;
use std::path::{Path, PathBuf};
use std::time::Instant;
use flate2::read::DeflateDecoder;
use rustc_data_structures::owning_ref::OwningRef;
use log::{debug, info, warn};
#[derive(Clone)]
struct CrateMismatch {
path: PathBuf,
got: String,
}
use std::{cmp, fmt, fs};
#[derive(Clone)]
crate struct CrateLocator<'a> {
@ -263,7 +251,6 @@ crate struct CrateLocator<'a> {
pub target: &'a Target,
pub triple: TargetTriple,
pub filesearch: FileSearch<'a>,
span: Span,
root: Option<&'a CratePaths>,
pub is_proc_macro: Option<bool>,
@ -275,6 +262,7 @@ crate struct CrateLocator<'a> {
rejected_via_filename: Vec<CrateMismatch>,
}
#[derive(Clone)]
crate struct CratePaths {
name: Symbol,
source: CrateSource,
@ -287,7 +275,7 @@ impl CratePaths {
}
#[derive(Copy, Clone, PartialEq)]
enum CrateFlavor {
crate enum CrateFlavor {
Rlib,
Rmeta,
Dylib,
@ -313,7 +301,6 @@ impl<'a> CrateLocator<'a> {
extra_filename: Option<&'a str>,
is_host: bool,
path_kind: PathKind,
span: Span,
root: Option<&'a CratePaths>,
is_proc_macro: Option<bool>,
) -> CrateLocator<'a> {
@ -349,7 +336,6 @@ impl<'a> CrateLocator<'a> {
} else {
sess.target_filesearch(path_kind)
},
span,
root,
is_proc_macro,
rejected_via_hash: Vec::new(),
@ -368,166 +354,30 @@ impl<'a> CrateLocator<'a> {
self.rejected_via_filename.clear();
}
crate fn maybe_load_library_crate(&mut self) -> Option<Library> {
crate fn maybe_load_library_crate(&mut self) -> Result<Option<Library>, CrateError> {
if !self.exact_paths.is_empty() {
return self.find_commandline_library();
}
let mut seen_paths = FxHashSet::default();
match self.extra_filename {
Some(s) => self
.find_library_crate(s, &mut seen_paths)
.or_else(|| self.find_library_crate("", &mut seen_paths)),
None => self.find_library_crate("", &mut seen_paths),
}
}
crate fn report_errs(self) -> ! {
let add = match self.root {
None => String::new(),
Some(r) => format!(" which `{}` depends on", r.name),
};
let mut msg = "the following crate versions were found:".to_string();
let mut err = if !self.rejected_via_hash.is_empty() {
let mut err = struct_span_err!(
self.sess,
self.span,
E0460,
"found possibly newer version of crate `{}`{}",
self.crate_name,
add
);
err.note("perhaps that crate needs to be recompiled?");
let mismatches = self.rejected_via_hash.iter();
for &CrateMismatch { ref path, .. } in mismatches {
msg.push_str(&format!("\ncrate `{}`: {}", self.crate_name, path.display()));
}
match self.root {
None => {}
Some(r) => {
for path in r.source.paths() {
msg.push_str(&format!("\ncrate `{}`: {}", r.name, path.display()));
}
}
}
err.note(&msg);
err
} else if !self.rejected_via_triple.is_empty() {
let mut err = struct_span_err!(
self.sess,
self.span,
E0461,
"couldn't find crate `{}` \
with expected target triple {}{}",
self.crate_name,
self.triple,
add
);
let mismatches = self.rejected_via_triple.iter();
for &CrateMismatch { ref path, ref got } in mismatches {
msg.push_str(&format!(
"\ncrate `{}`, target triple {}: {}",
self.crate_name,
got,
path.display()
));
}
err.note(&msg);
err
} else if !self.rejected_via_kind.is_empty() {
let mut err = struct_span_err!(
self.sess,
self.span,
E0462,
"found staticlib `{}` instead of rlib or dylib{}",
self.crate_name,
add
);
err.help("please recompile that crate using --crate-type lib");
let mismatches = self.rejected_via_kind.iter();
for &CrateMismatch { ref path, .. } in mismatches {
msg.push_str(&format!("\ncrate `{}`: {}", self.crate_name, path.display()));
}
err.note(&msg);
err
} else if !self.rejected_via_version.is_empty() {
let mut err = struct_span_err!(
self.sess,
self.span,
E0514,
"found crate `{}` compiled by an incompatible version \
of rustc{}",
self.crate_name,
add
);
err.help(&format!(
"please recompile that crate using this compiler ({})",
rustc_version()
));
let mismatches = self.rejected_via_version.iter();
for &CrateMismatch { ref path, ref got } in mismatches {
msg.push_str(&format!(
"\ncrate `{}` compiled by {}: {}",
self.crate_name,
got,
path.display()
));
}
err.note(&msg);
err
} else {
let mut err = struct_span_err!(
self.sess,
self.span,
E0463,
"can't find crate for `{}`{}",
self.crate_name,
add
);
if (self.crate_name == sym::std || self.crate_name == sym::core)
&& self.triple != TargetTriple::from_triple(config::host_triple())
{
err.note(&format!("the `{}` target may not be installed", self.triple));
} else if self.crate_name == sym::profiler_builtins {
err.note(&"the compiler may have been built without the profiler runtime");
}
err.span_label(self.span, "can't find crate");
err
};
if !self.rejected_via_filename.is_empty() {
let dylibname = self.dylibname();
let mismatches = self.rejected_via_filename.iter();
for &CrateMismatch { ref path, .. } in mismatches {
err.note(&format!(
"extern location for {} is of an unknown type: {}",
self.crate_name,
path.display()
))
.help(&format!(
"file name should be lib*.rlib or {}*.{}",
dylibname.0, dylibname.1
));
if let Some(extra_filename) = self.extra_filename {
if let library @ Some(_) = self.find_library_crate(extra_filename, &mut seen_paths)? {
return Ok(library);
}
}
err.emit();
self.sess.abort_if_errors();
unreachable!();
self.find_library_crate("", &mut seen_paths)
}
fn find_library_crate(
&mut self,
extra_prefix: &str,
seen_paths: &mut FxHashSet<PathBuf>,
) -> Option<Library> {
let dypair = self.dylibname();
let staticpair = self.staticlibname();
) -> Result<Option<Library>, CrateError> {
// want: crate_name.dir_part() + prefix + crate_name.file_part + "-"
let dylib_prefix = format!("{}{}{}", dypair.0, self.crate_name, extra_prefix);
let dylib_prefix =
format!("{}{}{}", self.target.options.dll_prefix, self.crate_name, extra_prefix);
let rlib_prefix = format!("lib{}{}", self.crate_name, extra_prefix);
let staticlib_prefix = format!("{}{}{}", staticpair.0, self.crate_name, extra_prefix);
let staticlib_prefix =
format!("{}{}{}", self.target.options.staticlib_prefix, self.crate_name, extra_prefix);
let mut candidates: FxHashMap<_, (FxHashMap<_, _>, FxHashMap<_, _>, FxHashMap<_, _>)> =
Default::default();
@ -555,10 +405,18 @@ impl<'a> CrateLocator<'a> {
(&file[(rlib_prefix.len())..(file.len() - ".rlib".len())], CrateFlavor::Rlib)
} else if file.starts_with(&rlib_prefix) && file.ends_with(".rmeta") {
(&file[(rlib_prefix.len())..(file.len() - ".rmeta".len())], CrateFlavor::Rmeta)
} else if file.starts_with(&dylib_prefix) && file.ends_with(&dypair.1) {
(&file[(dylib_prefix.len())..(file.len() - dypair.1.len())], CrateFlavor::Dylib)
} else if file.starts_with(&dylib_prefix)
&& file.ends_with(&self.target.options.dll_suffix)
{
(
&file
[(dylib_prefix.len())..(file.len() - self.target.options.dll_suffix.len())],
CrateFlavor::Dylib,
)
} else {
if file.starts_with(&staticlib_prefix) && file.ends_with(&staticpair.1) {
if file.starts_with(&staticlib_prefix)
&& file.ends_with(&self.target.options.staticlib_suffix)
{
staticlibs
.push(CrateMismatch { path: spf.path.clone(), got: "static".to_string() });
}
@ -567,9 +425,7 @@ impl<'a> CrateLocator<'a> {
info!("lib candidate: {}", spf.path.display());
let hash_str = hash.to_string();
let slot = candidates.entry(hash_str).or_default();
let (ref mut rlibs, ref mut rmetas, ref mut dylibs) = *slot;
let (rlibs, rmetas, dylibs) = candidates.entry(hash.to_string()).or_default();
fs::canonicalize(&spf.path)
.map(|p| {
if seen_paths.contains(&p) {
@ -577,16 +433,10 @@ impl<'a> CrateLocator<'a> {
};
seen_paths.insert(p.clone());
match found_kind {
CrateFlavor::Rlib => {
rlibs.insert(p, kind);
}
CrateFlavor::Rmeta => {
rmetas.insert(p, kind);
}
CrateFlavor::Dylib => {
dylibs.insert(p, kind);
}
}
CrateFlavor::Rlib => rlibs.insert(p, kind),
CrateFlavor::Rmeta => rmetas.insert(p, kind),
CrateFlavor::Dylib => dylibs.insert(p, kind),
};
FileMatches
})
.unwrap_or(FileDoesntMatch)
@ -603,7 +453,7 @@ impl<'a> CrateLocator<'a> {
// search is being performed for.
let mut libraries = FxHashMap::default();
for (_hash, (rlibs, rmetas, dylibs)) in candidates {
if let Some((svh, lib)) = self.extract_lib(rlibs, rmetas, dylibs) {
if let Some((svh, lib)) = self.extract_lib(rlibs, rmetas, dylibs)? {
libraries.insert(svh, lib);
}
}
@ -612,39 +462,9 @@ impl<'a> CrateLocator<'a> {
// what we've got and figure out if we found multiple candidates for
// libraries or not.
match libraries.len() {
0 => None,
1 => Some(libraries.into_iter().next().unwrap().1),
_ => {
let mut err = struct_span_err!(
self.sess,
self.span,
E0464,
"multiple matching crates for `{}`",
self.crate_name
);
let candidates = libraries
.iter()
.filter_map(|(_, lib)| {
let crate_name = &lib.metadata.get_root().name().as_str();
match &(&lib.source.dylib, &lib.source.rlib) {
&(&Some((ref pd, _)), &Some((ref pr, _))) => Some(format!(
"\ncrate `{}`: {}\n{:>padding$}",
crate_name,
pd.display(),
pr.display(),
padding = 8 + crate_name.len()
)),
&(&Some((ref p, _)), &None) | &(&None, &Some((ref p, _))) => {
Some(format!("\ncrate `{}`: {}", crate_name, p.display()))
}
&(&None, &None) => None,
}
})
.collect::<String>();
err.note(&format!("candidates:{}", candidates));
err.emit();
None
}
0 => Ok(None),
1 => Ok(Some(libraries.into_iter().next().unwrap().1)),
_ => Err(CrateError::MultipleMatchingCrates(self.crate_name, libraries)),
}
}
@ -653,16 +473,16 @@ impl<'a> CrateLocator<'a> {
rlibs: FxHashMap<PathBuf, PathKind>,
rmetas: FxHashMap<PathBuf, PathKind>,
dylibs: FxHashMap<PathBuf, PathKind>,
) -> Option<(Svh, Library)> {
) -> Result<Option<(Svh, Library)>, CrateError> {
let mut slot = None;
// Order here matters, rmeta should come first. See comment in
// `extract_one` below.
let source = CrateSource {
rmeta: self.extract_one(rmetas, CrateFlavor::Rmeta, &mut slot),
rlib: self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot),
dylib: self.extract_one(dylibs, CrateFlavor::Dylib, &mut slot),
rmeta: self.extract_one(rmetas, CrateFlavor::Rmeta, &mut slot)?,
rlib: self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot)?,
dylib: self.extract_one(dylibs, CrateFlavor::Dylib, &mut slot)?,
};
slot.map(|(svh, metadata)| (svh, Library { source, metadata }))
Ok(slot.map(|(svh, metadata)| (svh, Library { source, metadata })))
}
fn needs_crate_flavor(&self, flavor: CrateFlavor) -> bool {
@ -698,10 +518,7 @@ impl<'a> CrateLocator<'a> {
m: FxHashMap<PathBuf, PathKind>,
flavor: CrateFlavor,
slot: &mut Option<(Svh, MetadataBlob)>,
) -> Option<(PathBuf, PathKind)> {
let mut ret: Option<(PathBuf, PathKind)> = None;
let mut error = 0;
) -> Result<Option<(PathBuf, PathKind)>, CrateError> {
// If we are producing an rlib, and we've already loaded metadata, then
// we should not attempt to discover further crate sources (unless we're
// locating a proc macro; exact logic is in needs_crate_flavor). This means
@ -718,13 +535,14 @@ impl<'a> CrateLocator<'a> {
// from the other crate sources.
if slot.is_some() {
if m.is_empty() || !self.needs_crate_flavor(flavor) {
return None;
return Ok(None);
} else if m.len() == 1 {
return Some(m.into_iter().next().unwrap());
return Ok(Some(m.into_iter().next().unwrap()));
}
}
let mut err: Option<DiagnosticBuilder<'_>> = None;
let mut ret: Option<(PathBuf, PathKind)> = None;
let mut err_data: Option<Vec<PathBuf>> = None;
for (lib, kind) in m {
info!("{} reading metadata from: {}", flavor, lib.display());
let (hash, metadata) =
@ -744,30 +562,18 @@ impl<'a> CrateLocator<'a> {
};
// If we see multiple hashes, emit an error about duplicate candidates.
if slot.as_ref().map_or(false, |s| s.0 != hash) {
let mut e = struct_span_err!(
self.sess,
self.span,
E0465,
"multiple {} candidates for `{}` found",
flavor,
self.crate_name
);
e.span_note(
self.span,
&format!(r"candidate #1: {}", ret.as_ref().unwrap().0.display()),
);
if let Some(ref mut e) = err {
e.emit();
if let Some(candidates) = err_data {
return Err(CrateError::MultipleCandidates(
self.crate_name,
flavor,
candidates,
));
}
err = Some(e);
error = 1;
err_data = Some(vec![ret.as_ref().unwrap().0.clone()]);
*slot = None;
}
if error > 0 {
error += 1;
err.as_mut()
.unwrap()
.span_note(self.span, &format!(r"candidate #{}: {}", error, lib.display()));
if let Some(candidates) = &mut err_data {
candidates.push(lib);
continue;
}
@ -790,7 +596,7 @@ impl<'a> CrateLocator<'a> {
// As a result, we favor the sysroot crate here. Note that the
// candidates are all canonicalized, so we canonicalize the sysroot
// as well.
if let Some((ref prev, _)) = ret {
if let Some((prev, _)) = &ret {
let sysroot = &self.sess.sysroot;
let sysroot = sysroot.canonicalize().unwrap_or_else(|_| sysroot.to_path_buf());
if prev.starts_with(&sysroot) {
@ -801,11 +607,10 @@ impl<'a> CrateLocator<'a> {
ret = Some((lib, kind));
}
if error > 0 {
err.unwrap().emit();
None
if let Some(candidates) = err_data {
Err(CrateError::MultipleCandidates(self.crate_name, flavor, candidates))
} else {
ret
Ok(ret)
}
}
@ -860,71 +665,29 @@ impl<'a> CrateLocator<'a> {
Some(hash)
}
// Returns the corresponding (prefix, suffix) that files need to have for
// dynamic libraries
fn dylibname(&self) -> (String, String) {
let t = &self.target;
(t.options.dll_prefix.clone(), t.options.dll_suffix.clone())
}
// Returns the corresponding (prefix, suffix) that files need to have for
// static libraries
fn staticlibname(&self) -> (String, String) {
let t = &self.target;
(t.options.staticlib_prefix.clone(), t.options.staticlib_suffix.clone())
}
fn find_commandline_library(&mut self) -> Option<Library> {
fn find_commandline_library(&mut self) -> Result<Option<Library>, CrateError> {
// First, filter out all libraries that look suspicious. We only accept
// files which actually exist that have the correct naming scheme for
// rlibs/dylibs.
let sess = self.sess;
let dylibname = self.dylibname();
let mut rlibs = FxHashMap::default();
let mut rmetas = FxHashMap::default();
let mut dylibs = FxHashMap::default();
{
let crate_name = self.crate_name;
let rejected_via_filename = &mut self.rejected_via_filename;
let locs = self.exact_paths.iter().filter(|loc| {
if !loc.exists() {
sess.err(&format!(
"extern location for {} does not exist: {}",
crate_name,
loc.display()
));
return false;
}
let file = match loc.file_name().and_then(|s| s.to_str()) {
Some(file) => file,
None => {
sess.err(&format!(
"extern location for {} is not a file: {}",
crate_name,
loc.display()
));
return false;
}
};
if file.starts_with("lib") && (file.ends_with(".rlib") || file.ends_with(".rmeta"))
{
return true;
} else {
let (ref prefix, ref suffix) = dylibname;
if file.starts_with(&prefix[..]) && file.ends_with(&suffix[..]) {
return true;
}
for loc in &self.exact_paths {
if !loc.exists() {
return Err(CrateError::ExternLocationNotExist(self.crate_name, loc.clone()));
}
let file = match loc.file_name().and_then(|s| s.to_str()) {
Some(file) => file,
None => {
return Err(CrateError::ExternLocationNotFile(self.crate_name, loc.clone()));
}
};
rejected_via_filename
.push(CrateMismatch { path: (*loc).clone(), got: String::new() });
false
});
// Now that we have an iterator of good candidates, make sure
// there's at most one rlib and at most one dylib.
for loc in locs {
if file.starts_with("lib") && (file.ends_with(".rlib") || file.ends_with(".rmeta"))
|| file.starts_with(&self.target.options.dll_prefix)
&& file.ends_with(&self.target.options.dll_suffix)
{
// Make sure there's at most one rlib and at most one dylib.
if loc.file_name().unwrap().to_str().unwrap().ends_with(".rlib") {
rlibs.insert(fs::canonicalize(&loc).unwrap(), PathKind::ExternFlag);
} else if loc.file_name().unwrap().to_str().unwrap().ends_with(".rmeta") {
@ -932,25 +695,30 @@ impl<'a> CrateLocator<'a> {
} else {
dylibs.insert(fs::canonicalize(&loc).unwrap(), PathKind::ExternFlag);
}
} else {
self.rejected_via_filename
.push(CrateMismatch { path: loc.clone(), got: String::new() });
}
};
}
// Extract the dylib/rlib/rmeta triple.
self.extract_lib(rlibs, rmetas, dylibs).map(|(_, lib)| lib)
Ok(self.extract_lib(rlibs, rmetas, dylibs)?.map(|(_, lib)| lib))
}
}
// Just a small wrapper to time how long reading metadata takes.
fn get_metadata_section(
target: &Target,
flavor: CrateFlavor,
filename: &Path,
loader: &dyn MetadataLoader,
) -> Result<MetadataBlob, String> {
let start = Instant::now();
let ret = get_metadata_section_imp(target, flavor, filename, loader);
info!("reading {:?} => {:?}", filename.file_name().unwrap(), start.elapsed());
ret
crate fn into_error(self) -> CrateError {
CrateError::LocatorCombined(CombinedLocatorError {
crate_name: self.crate_name,
root: self.root.cloned(),
triple: self.triple,
dll_prefix: self.target.options.dll_prefix.clone(),
dll_suffix: self.target.options.dll_suffix.clone(),
rejected_via_hash: self.rejected_via_hash,
rejected_via_triple: self.rejected_via_triple,
rejected_via_kind: self.rejected_via_kind,
rejected_via_version: self.rejected_via_version,
rejected_via_filename: self.rejected_via_filename,
})
}
}
/// A trivial wrapper for `Mmap` that implements `StableDeref`.
@ -966,7 +734,7 @@ impl Deref for StableDerefMmap {
unsafe impl stable_deref_trait::StableDeref for StableDerefMmap {}
fn get_metadata_section_imp(
fn get_metadata_section(
target: &Target,
flavor: CrateFlavor,
filename: &Path,
@ -1026,12 +794,19 @@ pub fn find_plugin_registrar(
metadata_loader: &dyn MetadataLoader,
span: Span,
name: Symbol,
) -> Option<(PathBuf, CrateDisambiguator)> {
) -> (PathBuf, CrateDisambiguator) {
match find_plugin_registrar_impl(sess, metadata_loader, name) {
Ok(res) => res,
Err(err) => err.report(sess, span),
}
}
fn find_plugin_registrar_impl<'a>(
sess: &'a Session,
metadata_loader: &dyn MetadataLoader,
name: Symbol,
) -> Result<(PathBuf, CrateDisambiguator), CrateError> {
info!("find plugin registrar `{}`", name);
let target_triple = sess.opts.target_triple.clone();
let host_triple = TargetTriple::from_triple(config::host_triple());
let is_cross = target_triple != host_triple;
let mut target_only = false;
let mut locator = CrateLocator::new(
sess,
metadata_loader,
@ -1041,57 +816,16 @@ pub fn find_plugin_registrar(
None, // extra_filename
true, // is_host
PathKind::Crate,
span,
None, // root
None, // is_proc_macro
);
let library = locator.maybe_load_library_crate().or_else(|| {
if !is_cross {
return None;
}
// Try loading from target crates. This will abort later if we
// try to load a plugin registrar function,
target_only = true;
locator.target = &sess.target.target;
locator.triple = target_triple;
locator.filesearch = sess.target_filesearch(PathKind::Crate);
locator.maybe_load_library_crate()
});
let library = match library {
Some(l) => l,
None => locator.report_errs(),
};
if target_only {
let message = format!(
"plugin `{}` is not available for triple `{}` (only found {})",
name,
config::host_triple(),
sess.opts.target_triple
);
struct_span_err!(sess, span, E0456, "{}", &message).emit();
return None;
}
match library.source.dylib {
Some(dylib) => Some((dylib.0, library.metadata.get_root().disambiguator())),
None => {
struct_span_err!(
sess,
span,
E0457,
"plugin `{}` only found in rlib format, but must be available \
in dylib format",
name
)
.emit();
// No need to abort because the loading code will just ignore this
// empty dylib.
None
}
match locator.maybe_load_library_crate()? {
Some(library) => match library.source.dylib {
Some(dylib) => Ok((dylib.0, library.metadata.get_root().disambiguator())),
None => Err(CrateError::NonDylibPlugin(name)),
},
None => Err(locator.into_error()),
}
}
@ -1100,8 +834,8 @@ pub fn list_file_metadata(
target: &Target,
path: &Path,
metadata_loader: &dyn MetadataLoader,
out: &mut dyn io::Write,
) -> io::Result<()> {
out: &mut dyn Write,
) -> IoResult<()> {
let filename = path.file_name().unwrap().to_str().unwrap();
let flavor = if filename.ends_with(".rlib") {
CrateFlavor::Rlib
@ -1115,3 +849,259 @@ pub fn list_file_metadata(
Err(msg) => write!(out, "{}\n", msg),
}
}
// ------------------------------------------ Error reporting -------------------------------------
#[derive(Clone)]
struct CrateMismatch {
path: PathBuf,
got: String,
}
/// Candidate rejection reasons collected during crate search.
/// If no candidate is accepted, then these reasons are presented to the user,
/// otherwise they are ignored.
crate struct CombinedLocatorError {
crate_name: Symbol,
root: Option<CratePaths>,
triple: TargetTriple,
dll_prefix: String,
dll_suffix: String,
rejected_via_hash: Vec<CrateMismatch>,
rejected_via_triple: Vec<CrateMismatch>,
rejected_via_kind: Vec<CrateMismatch>,
rejected_via_version: Vec<CrateMismatch>,
rejected_via_filename: Vec<CrateMismatch>,
}
crate enum CrateError {
NonAsciiName(Symbol),
ExternLocationNotExist(Symbol, PathBuf),
ExternLocationNotFile(Symbol, PathBuf),
MultipleCandidates(Symbol, CrateFlavor, Vec<PathBuf>),
MultipleMatchingCrates(Symbol, FxHashMap<Svh, Library>),
SymbolConflictsCurrent(Symbol),
SymbolConflictsOthers(Symbol),
DlOpen(String),
DlSym(String),
LocatorCombined(CombinedLocatorError),
NonDylibPlugin(Symbol),
}
impl CrateError {
crate fn report(self, sess: &Session, span: Span) -> ! {
let mut err = match self {
CrateError::NonAsciiName(crate_name) => sess.struct_span_err(
span,
&format!("cannot load a crate with a non-ascii name `{}`", crate_name),
),
CrateError::ExternLocationNotExist(crate_name, loc) => sess.struct_span_err(
span,
&format!("extern location for {} does not exist: {}", crate_name, loc.display()),
),
CrateError::ExternLocationNotFile(crate_name, loc) => sess.struct_span_err(
span,
&format!("extern location for {} is not a file: {}", crate_name, loc.display()),
),
CrateError::MultipleCandidates(crate_name, flavor, candidates) => {
let mut err = struct_span_err!(
sess,
span,
E0465,
"multiple {} candidates for `{}` found",
flavor,
crate_name,
);
for (i, candidate) in candidates.iter().enumerate() {
err.span_note(span, &format!("candidate #{}: {}", i + 1, candidate.display()));
}
err
}
CrateError::MultipleMatchingCrates(crate_name, libraries) => {
let mut err = struct_span_err!(
sess,
span,
E0464,
"multiple matching crates for `{}`",
crate_name
);
let candidates = libraries
.iter()
.filter_map(|(_, lib)| {
let crate_name = &lib.metadata.get_root().name().as_str();
match (&lib.source.dylib, &lib.source.rlib) {
(Some((pd, _)), Some((pr, _))) => Some(format!(
"\ncrate `{}`: {}\n{:>padding$}",
crate_name,
pd.display(),
pr.display(),
padding = 8 + crate_name.len()
)),
(Some((p, _)), None) | (None, Some((p, _))) => {
Some(format!("\ncrate `{}`: {}", crate_name, p.display()))
}
(None, None) => None,
}
})
.collect::<String>();
err.note(&format!("candidates:{}", candidates));
err
}
CrateError::SymbolConflictsCurrent(root_name) => struct_span_err!(
sess,
span,
E0519,
"the current crate is indistinguishable from one of its dependencies: it has the \
same crate-name `{}` and was compiled with the same `-C metadata` arguments. \
This will result in symbol conflicts between the two.",
root_name,
),
CrateError::SymbolConflictsOthers(root_name) => struct_span_err!(
sess,
span,
E0523,
"found two different crates with name `{}` that are not distinguished by differing \
`-C metadata`. This will result in symbol conflicts between the two.",
root_name,
),
CrateError::DlOpen(s) | CrateError::DlSym(s) => sess.struct_span_err(span, &s),
CrateError::LocatorCombined(locator) => {
let crate_name = locator.crate_name;
let add = match &locator.root {
None => String::new(),
Some(r) => format!(" which `{}` depends on", r.name),
};
let mut msg = "the following crate versions were found:".to_string();
let mut err = if !locator.rejected_via_hash.is_empty() {
let mut err = struct_span_err!(
sess,
span,
E0460,
"found possibly newer version of crate `{}`{}",
crate_name,
add,
);
err.note("perhaps that crate needs to be recompiled?");
let mismatches = locator.rejected_via_hash.iter();
for CrateMismatch { path, .. } in mismatches {
msg.push_str(&format!("\ncrate `{}`: {}", crate_name, path.display()));
}
if let Some(r) = locator.root {
for path in r.source.paths() {
msg.push_str(&format!("\ncrate `{}`: {}", r.name, path.display()));
}
}
err.note(&msg);
err
} else if !locator.rejected_via_triple.is_empty() {
let mut err = struct_span_err!(
sess,
span,
E0461,
"couldn't find crate `{}` with expected target triple {}{}",
crate_name,
locator.triple,
add,
);
let mismatches = locator.rejected_via_triple.iter();
for CrateMismatch { path, got } in mismatches {
msg.push_str(&format!(
"\ncrate `{}`, target triple {}: {}",
crate_name,
got,
path.display(),
));
}
err.note(&msg);
err
} else if !locator.rejected_via_kind.is_empty() {
let mut err = struct_span_err!(
sess,
span,
E0462,
"found staticlib `{}` instead of rlib or dylib{}",
crate_name,
add,
);
err.help("please recompile that crate using --crate-type lib");
let mismatches = locator.rejected_via_kind.iter();
for CrateMismatch { path, .. } in mismatches {
msg.push_str(&format!("\ncrate `{}`: {}", crate_name, path.display()));
}
err.note(&msg);
err
} else if !locator.rejected_via_version.is_empty() {
let mut err = struct_span_err!(
sess,
span,
E0514,
"found crate `{}` compiled by an incompatible version of rustc{}",
crate_name,
add,
);
err.help(&format!(
"please recompile that crate using this compiler ({})",
rustc_version(),
));
let mismatches = locator.rejected_via_version.iter();
for CrateMismatch { path, got } in mismatches {
msg.push_str(&format!(
"\ncrate `{}` compiled by {}: {}",
crate_name,
got,
path.display(),
));
}
err.note(&msg);
err
} else {
let mut err = struct_span_err!(
sess,
span,
E0463,
"can't find crate for `{}`{}",
crate_name,
add,
);
if (crate_name == sym::std || crate_name == sym::core)
&& locator.triple != TargetTriple::from_triple(config::host_triple())
{
err.note(&format!("the `{}` target may not be installed", locator.triple));
} else if crate_name == sym::profiler_builtins {
err.note(&"the compiler may have been built without the profiler runtime");
}
err.span_label(span, "can't find crate");
err
};
if !locator.rejected_via_filename.is_empty() {
let mismatches = locator.rejected_via_filename.iter();
for CrateMismatch { path, .. } in mismatches {
err.note(&format!(
"extern location for {} is of an unknown type: {}",
crate_name,
path.display(),
))
.help(&format!(
"file name should be lib*.rlib or {}*.{}",
locator.dll_prefix, locator.dll_suffix
));
}
}
err
}
CrateError::NonDylibPlugin(crate_name) => struct_span_err!(
sess,
span,
E0457,
"plugin `{}` only found in rlib format, but must be available in dylib format",
crate_name,
),
};
err.emit();
sess.abort_if_errors();
unreachable!();
}
}
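
The refactor above turns the locator's diagnostics into plain data: every failure becomes a `CrateError` variant that is carried back to a single `report` call. A minimal sketch of that shape, using stand-in types rather than the real rustc `Session`/`CrateError` definitions:

enum LocateError {
    NotFound(String),
    MultipleCandidates(String, Vec<std::path::PathBuf>),
}

fn locate(name: &str) -> Result<std::path::PathBuf, LocateError> {
    // Search logic would go here; failures are returned as data instead of
    // being emitted as diagnostics at the point where they are discovered.
    if name == "std" {
        Ok(std::path::PathBuf::from("/sysroot/lib/libstd.rlib"))
    } else {
        Err(LocateError::NotFound(name.to_string()))
    }
}

fn report(err: LocateError) -> ! {
    // All user-facing messages live in one place, mirroring `CrateError::report`.
    match err {
        LocateError::NotFound(n) => panic!("can't find crate for `{}`", n),
        LocateError::MultipleCandidates(n, c) => {
            panic!("multiple candidates for `{}`: {:?}", n, c)
        }
    }
}

fn main() {
    match locate("std") {
        Ok(path) => println!("loaded {}", path.display()),
        Err(e) => report(e),
    }
}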

View file

@ -2,9 +2,10 @@
/// Positional arguments to `libcore::count_code_region()`
pub mod count_code_region_args {
pub const COUNTER_INDEX: usize = 0;
pub const START_BYTE_POS: usize = 1;
pub const END_BYTE_POS: usize = 2;
pub const FUNCTION_SOURCE_HASH: usize = 0;
pub const COUNTER_INDEX: usize = 1;
pub const START_BYTE_POS: usize = 2;
pub const END_BYTE_POS: usize = 3;
}
/// Positional arguments to `libcore::coverage_counter_add()` and
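
The constants above are positional indexes into the argument list of the injected `count_code_region()` call; `inject_counter` later in this diff pushes the operands in exactly that order, guarding each position with `debug_assert_eq!`. A standalone sketch of the same idea, using plain integers in place of MIR operands:

mod count_code_region_args {
    pub const FUNCTION_SOURCE_HASH: usize = 0;
    pub const COUNTER_INDEX: usize = 1;
    pub const START_BYTE_POS: usize = 2;
    pub const END_BYTE_POS: usize = 3;
}

fn main() {
    use count_code_region_args::*;
    // Each constant must equal the vector length at the moment its value is pushed,
    // so the argument layout always matches the positional indexes.
    let mut args: Vec<u64> = Vec::new();
    debug_assert_eq!(FUNCTION_SOURCE_HASH, args.len());
    args.push(0xdead_beef); // function source hash
    debug_assert_eq!(COUNTER_INDEX, args.len());
    args.push(0); // counter id
    debug_assert_eq!(START_BYTE_POS, args.len());
    args.push(10); // region start byte position
    debug_assert_eq!(END_BYTE_POS, args.len());
    args.push(42); // region end byte position
    assert_eq!(args.len(), 4);
}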

View file

@ -86,7 +86,7 @@ impl<'tcx> MonoItem<'tcx> {
.debugging_opts
.inline_in_all_cgus
.unwrap_or_else(|| tcx.sess.opts.optimize != OptLevel::No)
&& !tcx.sess.opts.cg.link_dead_code;
&& tcx.sess.opts.cg.link_dead_code != Some(true);
match *self {
MonoItem::Fn(ref instance) => {

View file

@ -400,13 +400,11 @@ pub struct DestructuredConst<'tcx> {
/// `InstrumentCoverage` MIR pass and can be retrieved via the `coverageinfo` query.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable)]
pub struct CoverageInfo {
/// A hash value that can be used by the consumer of the coverage profile data to detect
/// changes to the instrumented source of the associated MIR body (typically, for an
/// individual function).
pub hash: u64,
/// The total number of coverage region counters added to the MIR `Body`.
pub num_counters: u32,
/// The total number of coverage region counter expressions added to the MIR `Body`.
pub num_expressions: u32,
}
impl<'tcx> TyCtxt<'tcx> {

View file

@ -95,6 +95,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let (dest, ret) = match ret {
None => match intrinsic_name {
sym::transmute => throw_ub_format!("transmuting to uninhabited type"),
sym::unreachable => throw_ub!(Unreachable),
sym::abort => M::abort(self)?,
// Unsupported diverging intrinsic.
_ => return Ok(false),

View file

@ -161,7 +161,7 @@ where
// Next we try to make as many symbols "internal" as possible, so LLVM has
// more freedom to optimize.
if !tcx.sess.opts.cg.link_dead_code {
if tcx.sess.opts.cg.link_dead_code != Some(true) {
let _prof_timer = tcx.prof.generic_activity("cgu_partitioning_internalize_symbols");
internalize_symbols(tcx, &mut post_inlining, inlining_map);
}
@ -906,7 +906,7 @@ fn collect_and_partition_mono_items(
}
}
None => {
if tcx.sess.opts.cg.link_dead_code {
if tcx.sess.opts.cg.link_dead_code == Some(true) {
MonoItemCollectionMode::Eager
} else {
MonoItemCollectionMode::Lazy

View file

@ -35,46 +35,64 @@ fn coverageinfo_from_mir<'tcx>(tcx: TyCtxt<'tcx>, mir_def_id: DefId) -> Coverage
// represents a single function. Validate and/or correct if inlining (which should be disabled
// if -Zinstrument-coverage is enabled) and/or monomorphization invalidates these assumptions.
let count_code_region_fn = tcx.require_lang_item(lang_items::CountCodeRegionFnLangItem, None);
let coverage_counter_add_fn =
tcx.require_lang_item(lang_items::CoverageCounterAddFnLangItem, None);
let coverage_counter_subtract_fn =
tcx.require_lang_item(lang_items::CoverageCounterSubtractFnLangItem, None);
// The `num_counters` argument to `llvm.instrprof.increment` is the number of injected
// counters, with each counter having an index from `0..num_counters-1`. MIR optimization
// may split and duplicate some BasicBlock sequences. Simply counting the calls may not
// work; but computing the num_counters by adding `1` to the highest index (for a given
// instrumented function) is valid.
//
// `num_expressions` is the number of counter expressions added to the MIR body. Both
// `num_counters` and `num_expressions` are used to initialize new vectors, during backend
// code generation, to look up counters and expressions by their simple u32 indexes.
let mut num_counters: u32 = 0;
for terminator in traversal::preorder(mir_body)
.map(|(_, data)| (data, count_code_region_fn))
.filter_map(terminators_that_call_given_fn)
let mut num_expressions: u32 = 0;
for terminator in
traversal::preorder(mir_body).map(|(_, data)| data).filter_map(call_terminators)
{
if let TerminatorKind::Call { args, .. } = &terminator.kind {
let index_arg = args.get(count_code_region_args::COUNTER_INDEX).expect("arg found");
let index =
mir::Operand::scalar_from_const(index_arg).to_u32().expect("index arg is u32");
num_counters = std::cmp::max(num_counters, index + 1);
}
}
let hash = if num_counters > 0 { hash_mir_source(tcx, mir_def_id) } else { 0 };
CoverageInfo { num_counters, hash }
}
fn terminators_that_call_given_fn(
(data, fn_def_id): (&'tcx BasicBlockData<'tcx>, DefId),
) -> Option<&'tcx Terminator<'tcx>> {
if let Some(terminator) = &data.terminator {
if let TerminatorKind::Call { func: Operand::Constant(func), .. } = &terminator.kind {
if let FnDef(called_fn_def_id, _) = func.literal.ty.kind {
if called_fn_def_id == fn_def_id {
return Some(&terminator);
if let TerminatorKind::Call { func: Operand::Constant(func), args, .. } = &terminator.kind {
match func.literal.ty.kind {
FnDef(id, _) if id == count_code_region_fn => {
let index_arg =
args.get(count_code_region_args::COUNTER_INDEX).expect("arg found");
let counter_index = mir::Operand::scalar_from_const(index_arg)
.to_u32()
.expect("index arg is u32");
num_counters = std::cmp::max(num_counters, counter_index + 1);
}
FnDef(id, _)
if id == coverage_counter_add_fn || id == coverage_counter_subtract_fn =>
{
let index_arg = args
.get(coverage_counter_expression_args::COUNTER_EXPRESSION_INDEX)
.expect("arg found");
let translated_index = mir::Operand::scalar_from_const(index_arg)
.to_u32()
.expect("index arg is u32");
// Counter expressions start with "translated indexes", descending from
// `u32::MAX`, so the range of expression indexes is disjoint from the range of
// counter indexes. This way, both counters and expressions can be operands in
// other expressions.
let expression_index = u32::MAX - translated_index;
num_expressions = std::cmp::max(num_expressions, expression_index + 1);
}
_ => {}
}
}
}
None
CoverageInfo { num_counters, num_expressions }
}
struct Instrumentor<'tcx> {
tcx: TyCtxt<'tcx>,
num_counters: u32,
fn call_terminators(data: &'tcx BasicBlockData<'tcx>) -> Option<&'tcx Terminator<'tcx>> {
let terminator = data.terminator();
match terminator.kind {
TerminatorKind::Call { .. } => Some(terminator),
_ => None,
}
}
impl<'tcx> MirPass<'tcx> for InstrumentCoverage {
@ -83,42 +101,106 @@ impl<'tcx> MirPass<'tcx> for InstrumentCoverage {
// If the InstrumentCoverage pass is called on promoted MIRs, skip them.
// See: https://github.com/rust-lang/rust/pull/73011#discussion_r438317601
if src.promoted.is_none() {
debug!(
"instrumenting {:?}, span: {}",
src.def_id(),
tcx.sess.source_map().span_to_string(mir_body.span)
);
Instrumentor::new(tcx).inject_counters(mir_body);
Instrumentor::new(tcx, src, mir_body).inject_counters();
}
}
}
}
impl<'tcx> Instrumentor<'tcx> {
fn new(tcx: TyCtxt<'tcx>) -> Self {
Self { tcx, num_counters: 0 }
/// Distinguishes the expression operators.
enum Op {
Add,
Subtract,
}
struct Instrumentor<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
mir_def_id: DefId,
mir_body: &'a mut mir::Body<'tcx>,
hir_body: &'tcx rustc_hir::Body<'tcx>,
function_source_hash: Option<u64>,
num_counters: u32,
num_expressions: u32,
}
impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
fn new(tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, mir_body: &'a mut mir::Body<'tcx>) -> Self {
let mir_def_id = src.def_id();
let hir_body = hir_body(tcx, mir_def_id);
Self {
tcx,
mir_def_id,
mir_body,
hir_body,
function_source_hash: None,
num_counters: 0,
num_expressions: 0,
}
}
/// Counter IDs start from zero and go up.
fn next_counter(&mut self) -> u32 {
assert!(self.num_counters < u32::MAX - self.num_expressions);
let next = self.num_counters;
self.num_counters += 1;
next
}
fn inject_counters(&mut self, mir_body: &mut mir::Body<'tcx>) {
// FIXME(richkadel): As a first step, counters are only injected at the top of each
// function. The complete solution will inject counters at each conditional code branch.
let code_region = mir_body.span;
let next_block = START_BLOCK;
self.inject_counter(mir_body, code_region, next_block);
/// Expression IDs start from u32::MAX and go down because a CounterExpression can reference
/// (add or subtract the counts of) both Counter regions and CounterExpression regions. The indexes
/// of each type of region must be contiguous, but also must be unique across both sets.
/// The expression IDs are eventually translated into region indexes (starting after the last
/// counter index, for the given function), during backend code generation, by the helper method
/// `rustc_codegen_ssa::coverageinfo::map::FunctionCoverage::translate_expressions()`.
fn next_expression(&mut self) -> u32 {
assert!(self.num_counters < u32::MAX - self.num_expressions);
let next = u32::MAX - self.num_expressions;
self.num_expressions += 1;
next
}
fn inject_counter(
&mut self,
mir_body: &mut mir::Body<'tcx>,
code_region: Span,
next_block: BasicBlock,
) {
fn function_source_hash(&mut self) -> u64 {
match self.function_source_hash {
Some(hash) => hash,
None => {
let hash = hash_mir_source(self.tcx, self.hir_body);
self.function_source_hash.replace(hash);
hash
}
}
}
fn inject_counters(&mut self) {
let body_span = self.hir_body.value.span;
debug!(
"instrumenting {:?}, span: {}",
self.mir_def_id,
self.tcx.sess.source_map().span_to_string(body_span)
);
// FIXME(richkadel): As a first step, counters are only injected at the top of each
// function. The complete solution will inject counters at each conditional code branch.
let next_block = START_BLOCK;
self.inject_counter(body_span, next_block);
// FIXME(richkadel): The next step to implement source based coverage analysis will be
// instrumenting branches within functions, and some regions will be counted by "counter
// expression". The function to inject counter expression is implemented. Replace this
// "fake use" with real use.
let fake_use = false;
if fake_use {
let add = false;
if add {
self.inject_counter_expression(body_span, next_block, 1, Op::Add, 2);
} else {
self.inject_counter_expression(body_span, next_block, 1, Op::Subtract, 2);
}
}
}
fn inject_counter(&mut self, code_region: Span, next_block: BasicBlock) -> u32 {
let counter_id = self.next_counter();
let function_source_hash = self.function_source_hash();
let injection_point = code_region.shrink_to_lo();
let count_code_region_fn = function_handle(
@ -127,13 +209,14 @@ impl<'tcx> Instrumentor<'tcx> {
injection_point,
);
let index = self.next_counter();
let mut args = Vec::new();
use count_code_region_args::*;
debug_assert_eq!(FUNCTION_SOURCE_HASH, args.len());
args.push(self.const_u64(function_source_hash, injection_point));
debug_assert_eq!(COUNTER_INDEX, args.len());
args.push(self.const_u32(index, injection_point));
args.push(self.const_u32(counter_id, injection_point));
debug_assert_eq!(START_BYTE_POS, args.len());
args.push(self.const_u32(code_region.lo().to_u32(), injection_point));
@ -141,36 +224,98 @@ impl<'tcx> Instrumentor<'tcx> {
debug_assert_eq!(END_BYTE_POS, args.len());
args.push(self.const_u32(code_region.hi().to_u32(), injection_point));
let mut patch = MirPatch::new(mir_body);
self.inject_call(count_code_region_fn, args, injection_point, next_block);
let temp = patch.new_temp(self.tcx.mk_unit(), code_region);
let new_block = patch.new_block(placeholder_block(code_region));
counter_id
}
fn inject_counter_expression(
&mut self,
code_region: Span,
next_block: BasicBlock,
lhs: u32,
op: Op,
rhs: u32,
) -> u32 {
let expression_id = self.next_expression();
let injection_point = code_region.shrink_to_lo();
let count_code_region_fn = function_handle(
self.tcx,
self.tcx.require_lang_item(
match op {
Op::Add => lang_items::CoverageCounterAddFnLangItem,
Op::Subtract => lang_items::CoverageCounterSubtractFnLangItem,
},
None,
),
injection_point,
);
let mut args = Vec::new();
use coverage_counter_expression_args::*;
debug_assert_eq!(COUNTER_EXPRESSION_INDEX, args.len());
args.push(self.const_u32(expression_id, injection_point));
debug_assert_eq!(LEFT_INDEX, args.len());
args.push(self.const_u32(lhs, injection_point));
debug_assert_eq!(RIGHT_INDEX, args.len());
args.push(self.const_u32(rhs, injection_point));
debug_assert_eq!(START_BYTE_POS, args.len());
args.push(self.const_u32(code_region.lo().to_u32(), injection_point));
debug_assert_eq!(END_BYTE_POS, args.len());
args.push(self.const_u32(code_region.hi().to_u32(), injection_point));
self.inject_call(count_code_region_fn, args, injection_point, next_block);
expression_id
}
fn inject_call(
&mut self,
func: Operand<'tcx>,
args: Vec<Operand<'tcx>>,
fn_span: Span,
next_block: BasicBlock,
) {
let mut patch = MirPatch::new(self.mir_body);
let temp = patch.new_temp(self.tcx.mk_unit(), fn_span);
let new_block = patch.new_block(placeholder_block(fn_span));
patch.patch_terminator(
new_block,
TerminatorKind::Call {
func: count_code_region_fn,
func,
args,
// new_block will be swapped with the next_block, after applying the patch
destination: Some((Place::from(temp), new_block)),
cleanup: None,
from_hir_call: false,
fn_span: injection_point,
fn_span,
},
);
patch.add_statement(new_block.start_location(), StatementKind::StorageLive(temp));
patch.add_statement(next_block.start_location(), StatementKind::StorageDead(temp));
patch.apply(mir_body);
patch.apply(self.mir_body);
// To insert the `new_block` in front of the first block in the counted branch (the
// `next_block`), just swap the indexes, leaving the rest of the graph unchanged.
mir_body.basic_blocks_mut().swap(next_block, new_block);
self.mir_body.basic_blocks_mut().swap(next_block, new_block);
}
fn const_u32(&self, value: u32, span: Span) -> Operand<'tcx> {
Operand::const_from_scalar(self.tcx, self.tcx.types.u32, Scalar::from_u32(value), span)
}
fn const_u64(&self, value: u64, span: Span) -> Operand<'tcx> {
Operand::const_from_scalar(self.tcx, self.tcx.types.u64, Scalar::from_u64(value), span)
}
}
fn function_handle<'tcx>(tcx: TyCtxt<'tcx>, fn_def_id: DefId, span: Span) -> Operand<'tcx> {
@ -192,10 +337,13 @@ fn placeholder_block(span: Span) -> BasicBlockData<'tcx> {
}
}
fn hash_mir_source<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> u64 {
fn hir_body<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx rustc_hir::Body<'tcx> {
let hir_node = tcx.hir().get_if_local(def_id).expect("DefId is local");
let fn_body_id = hir::map::associated_body(hir_node).expect("HIR node is a function with body");
let hir_body = tcx.hir().body(fn_body_id);
tcx.hir().body(fn_body_id)
}
fn hash_mir_source<'tcx>(tcx: TyCtxt<'tcx>, hir_body: &'tcx rustc_hir::Body<'tcx>) -> u64 {
let mut hcx = tcx.create_no_span_stable_hashing_context();
hash(&mut hcx, &hir_body.value).to_smaller_hash()
}
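
The instrumentor hands out counter IDs ascending from 0 and expression IDs descending from `u32::MAX`, so the two ID spaces never collide and either kind can appear as an operand; the backend later recovers a dense expression index as `u32::MAX - id`, as computed in `coverageinfo_from_mir` above. A self-contained sketch of that numbering with ordinary integers:

struct Ids {
    num_counters: u32,
    num_expressions: u32,
}

impl Ids {
    // Counter IDs start from zero and go up.
    fn next_counter(&mut self) -> u32 {
        assert!(self.num_counters < u32::MAX - self.num_expressions);
        let next = self.num_counters;
        self.num_counters += 1;
        next
    }

    // Expression IDs start from u32::MAX and go down, keeping them disjoint
    // from counter IDs.
    fn next_expression(&mut self) -> u32 {
        assert!(self.num_counters < u32::MAX - self.num_expressions);
        let next = u32::MAX - self.num_expressions;
        self.num_expressions += 1;
        next
    }
}

fn main() {
    let mut ids = Ids { num_counters: 0, num_expressions: 0 };
    let c0 = ids.next_counter();    // 0
    let e0 = ids.next_expression(); // u32::MAX
    let e1 = ids.next_expression(); // u32::MAX - 1
    assert_eq!((c0, e0, e1), (0, u32::MAX, u32::MAX - 1));
    // Dense expression index recovered during codegen:
    assert_eq!(u32::MAX - e1, 1);
}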

View file

@ -3,14 +3,13 @@
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::struct_span_err;
use rustc_hir as hir;
use rustc_hir::fake_lang_items::FAKE_ITEMS_REFS;
use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
use rustc_hir::lang_items;
use rustc_hir::lang_items::ITEM_REFS;
use rustc_hir::weak_lang_items::WEAK_ITEMS_REFS;
use rustc_middle::middle::lang_items::required;
use rustc_middle::ty::TyCtxt;
use rustc_session::config::CrateType;
use rustc_span::symbol::sym;
use rustc_span::symbol::Symbol;
use rustc_span::Span;
@ -77,15 +76,14 @@ impl<'a, 'tcx> Context<'a, 'tcx> {
if self.items.require(item).is_err() {
self.items.missing.push(item);
}
} else if name == sym::count_code_region {
// `core::intrinsics::code_count_region()` is (currently) the only `extern` lang item
// that is never actually linked. It is not a `weak_lang_item` that can be registered
// when used, and should be registered here instead.
if let Some((item_index, _)) = ITEM_REFS.get(&name).cloned() {
if self.items.items[item_index].is_none() {
let item_def_id = self.tcx.hir().local_def_id(hir_id).to_def_id();
self.items.items[item_index] = Some(item_def_id);
}
} else if let Some(&item) = FAKE_ITEMS_REFS.get(&name) {
// Ensure "fake lang items" are registered. These are `extern` lang items that are
// injected into the MIR automatically (such as source code coverage counters), but are
// never actually linked; therefore, unlike "weak lang items", they cannot be registered
// when used, because they never appear to be used.
if self.items.items[item as usize].is_none() {
let item_def_id = self.tcx.hir().local_def_id(hir_id).to_def_id();
self.items.items[item as usize] = Some(item_def_id);
}
} else {
struct_span_err!(self.tcx.sess, span, E0264, "unknown external lang item: `{}`", name)

View file

@ -55,13 +55,11 @@ fn load_plugin(
metadata_loader: &dyn MetadataLoader,
ident: Ident,
) {
let registrar = locator::find_plugin_registrar(sess, metadata_loader, ident.span, ident.name);
if let Some((lib, disambiguator)) = registrar {
let symbol = sess.generate_plugin_registrar_symbol(disambiguator);
let fun = dylink_registrar(sess, ident.span, lib, symbol);
plugins.push(fun);
}
let (lib, disambiguator) =
locator::find_plugin_registrar(sess, metadata_loader, ident.span, ident.name);
let symbol = sess.generate_plugin_registrar_symbol(disambiguator);
let fun = dylink_registrar(sess, ident.span, lib, symbol);
plugins.push(fun);
}
// Dynamically link a registrar function into the compiler process.

View file

@ -859,9 +859,7 @@ impl<'a> Resolver<'a> {
// otherwise cause duplicate suggestions.
continue;
}
if let Some(crate_id) =
self.crate_loader.maybe_process_path_extern(ident.name, ident.span)
{
if let Some(crate_id) = self.crate_loader.maybe_process_path_extern(ident.name) {
let crate_root =
self.get_module(DefId { krate: crate_id, index: CRATE_DEF_INDEX });
suggestions.extend(self.lookup_import_candidates_from_module(

View file

@ -760,10 +760,8 @@ impl<'a> LateResolutionVisitor<'a, '_, '_> {
if !module.no_implicit_prelude {
let extern_prelude = self.r.extern_prelude.clone();
names.extend(extern_prelude.iter().flat_map(|(ident, _)| {
self.r
.crate_loader
.maybe_process_path_extern(ident.name, ident.span)
.and_then(|crate_id| {
self.r.crate_loader.maybe_process_path_extern(ident.name).and_then(
|crate_id| {
let crate_mod = Res::Def(
DefKind::Mod,
DefId { krate: crate_id, index: CRATE_DEF_INDEX },
@ -774,7 +772,8 @@ impl<'a> LateResolutionVisitor<'a, '_, '_> {
} else {
None
}
})
},
)
}));
if let Some(prelude) = self.r.prelude {

View file

@ -2957,7 +2957,7 @@ impl<'a> Resolver<'a> {
let crate_id = if !speculative {
self.crate_loader.process_path_extern(ident.name, ident.span)
} else {
self.crate_loader.maybe_process_path_extern(ident.name, ident.span)?
self.crate_loader.maybe_process_path_extern(ident.name)?
};
let crate_root = self.get_module(DefId { krate: crate_id, index: CRATE_DEF_INDEX });
Some(

View file

@ -1707,6 +1707,31 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
);
}
if debugging_opts.instrument_coverage {
if cg.profile_generate.enabled() || cg.profile_use.is_some() {
early_error(
error_format,
"option `-Z instrument-coverage` is not compatible with either `-C profile-use` \
or `-C profile-generate`",
);
}
// `-Z instrument-coverage` implies:
// * `-Z symbol-mangling-version=v0` - to ensure consistent and reversible name mangling.
// Note, LLVM coverage tools can analyze coverage over multiple runs, including some
// changes to source code; so mangled names must be consistent across compilations.
// * `-C link-dead-code` - so unexecuted code is still counted as zero, rather than being
// optimized out. Note that instrumenting dead code can be explicitly disabled with:
// `-Z instrument-coverage -C link-dead-code=no`.
debugging_opts.symbol_mangling_version = SymbolManglingVersion::V0;
if cg.link_dead_code == None {
// FIXME(richkadel): Investigate if the `instrument-coverage` implementation can
// inject ["zero counters"](https://llvm.org/docs/CoverageMappingFormat.html#counter)
// in the coverage map when "dead code" is removed, rather than forcing `link-dead-code`.
cg.link_dead_code = Some(true);
}
}
if !cg.embed_bitcode {
match cg.lto {
LtoCli::No | LtoCli::Unspecified => {}
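
Making `link_dead_code` an `Option<bool>` (see the `options!` change in a later file of this diff) is what allows `-Z instrument-coverage` to imply the flag only when the user left it unset, while an explicit `-C link-dead-code=no` still wins. A reduced sketch of that tri-state handling, with a hypothetical helper name:

// Sketch: `None` means "unset", so an implied default can be filled in without
// clobbering an explicit `=yes` or `=no` from the user.
fn effective_link_dead_code(explicit: Option<bool>, instrument_coverage: bool) -> bool {
    let mut link_dead_code = explicit;
    if instrument_coverage && link_dead_code == None {
        // Implied by -Z instrument-coverage, unless the user opted out explicitly.
        link_dead_code = Some(true);
    }
    link_dead_code == Some(true)
}

fn main() {
    assert!(effective_link_dead_code(None, true));         // implied
    assert!(!effective_link_dead_code(Some(false), true));  // explicit opt-out wins
    assert!(!effective_link_dead_code(None, false));        // default stays off
}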

View file

@ -715,7 +715,7 @@ options! {CodegenOptions, CodegenSetter, basic_codegen_options,
"a single extra argument to append to the linker invocation (can be used several times)"),
link_args: Vec<String> = (Vec::new(), parse_list, [UNTRACKED],
"extra arguments to append to the linker invocation (space separated)"),
link_dead_code: bool = (false, parse_bool, [UNTRACKED],
link_dead_code: Option<bool> = (None, parse_opt_bool, [UNTRACKED],
"keep dead code at link time (useful for code coverage) (default: no)"),
linker: Option<PathBuf> = (None, parse_opt_pathbuf, [UNTRACKED],
"system linker to link outputs with"),
@ -880,10 +880,12 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
(such as entering an empty infinite loop) by inserting llvm.sideeffect \
(default: no)"),
instrument_coverage: bool = (false, parse_bool, [TRACKED],
"instrument the generated code with LLVM code region counters to (in the \
future) generate coverage reports; disables/overrides some optimization \
options (note, the compiler build config must include `profiler = true`) \
(default: no)"),
"instrument the generated code to support LLVM source-based code coverage \
reports (note, the compiler build config must include `profiler = true`, \
and is mutually exclusive with `-C profile-generate`/`-C profile-use`); \
implies `-C link-dead-code` (unless explicitly disabled) and \
`-Z symbol-mangling-version=v0`; and disables/overrides some optimization \
options (default: no)"),
instrument_mcount: bool = (false, parse_bool, [TRACKED],
"insert function instrument code for mcount-based tracing (default: no)"),
keep_hygiene_data: bool = (false, parse_bool, [UNTRACKED],

View file

@ -1357,6 +1357,20 @@ fn validate_commandline_args_with_session_available(sess: &Session) {
);
}
// FIXME(richkadel): See `src/test/run-make-fulldeps/instrument-coverage/Makefile`. After
// compiling with `-Zinstrument-coverage`, the resulting binary generates a segfault during
// the program's exit process (likely while attempting to generate the coverage stats in
// the "*.profraw" file). An investigation to resolve the problem on Windows is ongoing,
// but until this is resolved, the option is disabled on Windows, and the test is skipped
// when targeting `MSVC`.
if sess.opts.debugging_opts.instrument_coverage && sess.target.target.options.is_like_msvc {
sess.warn(
"Rust source-based code coverage instrumentation (with `-Z instrument-coverage`) \
is not yet supported on Windows when targeting MSVC. The resulting binaries will \
still be instrumented for experimentation purposes, but may not execute correctly.",
);
}
const ASAN_SUPPORTED_TARGETS: &[&str] = &[
"aarch64-fuchsia",
"aarch64-unknown-linux-gnu",

View file

@ -676,6 +676,7 @@ supported_targets! {
("powerpc64-wrs-vxworks", powerpc64_wrs_vxworks),
("mipsel-sony-psp", mipsel_sony_psp),
("thumbv4t-none-eabi", thumbv4t_none_eabi),
}
/// Everything `rustc` knows about how to compile for a specific target.

View file

@ -0,0 +1,62 @@
//! Targets the ARMv4T, with code as `t32` code by default.
//!
//! Primarily of use for the GBA, but usable with other devices too.
//!
//! Please ping @Lokathor if changes are needed.
//!
//! This target profile assumes that you have the ARM binutils in your path (specifically the linker, `arm-none-eabi-ld`). They can be obtained for free for all major OSes from the ARM developer's website, and they may also be available in your system's package manager. Unfortunately, the standard linker that Rust uses (`lld`) only supports as far back as `ARMv5TE`, so we must use the GNU `ld` linker.
//!
//! **Important:** This target profile **does not** specify a linker script. You just get the default link script when you build a binary for this target. The default link script is very likely wrong, so you should use `-Clink-arg=-Tmy_script.ld` to override that with a correct linker script.
use crate::spec::{LinkerFlavor, Target, TargetOptions, TargetResult};
pub fn target() -> TargetResult {
Ok(Target {
llvm_target: "thumbv4t-none-eabi".to_string(),
target_endian: "little".to_string(),
target_pointer_width: "32".to_string(),
target_c_int_width: "32".to_string(),
target_os: "none".to_string(),
target_env: "".to_string(),
target_vendor: "".to_string(),
arch: "arm".to_string(),
/* Data layout args are '-' separated:
* little endian
* stack is 64-bit aligned (EABI)
* pointers are 32-bit
* i64 must be 64-bit aligned (EABI)
* mangle names with ELF style
* native integers are 32-bit
* All other elements are default
*/
data_layout: "e-S64-p:32:32-i64:64-m:e-n32".to_string(),
linker_flavor: LinkerFlavor::Ld,
options: TargetOptions {
linker: Some("arm-none-eabi-ld".to_string()),
linker_is_gnu: true,
// extra args passed to the external assembler (assuming `arm-none-eabi-as`):
// * activate t32/a32 interworking
// * use arch ARMv4T
// * use little-endian
asm_args: vec![
"-mthumb-interwork".to_string(),
"-march=armv4t".to_string(),
"-mlittle-endian".to_string(),
],
// minimum extra features, these cannot be disabled via -C
features: "+soft-float,+strict-align".to_string(),
main_needs_argc_argv: false,
// No thread-local storage (just use a static Cell)
has_elf_tls: false,
// don't have atomic compare-and-swap
atomic_cas: false,
..super::thumb_base::opts()
},
})
}

View file

@ -386,7 +386,7 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {
}
sym::count_code_region => {
(0, vec![tcx.types.u32, tcx.types.u32, tcx.types.u32], tcx.mk_unit())
(0, vec![tcx.types.u64, tcx.types.u32, tcx.types.u32, tcx.types.u32], tcx.mk_unit())
}
sym::coverage_counter_add | sym::coverage_counter_subtract => (

View file

@ -59,6 +59,7 @@
//! The `#[global_allocator]` can only be used once in a crate
//! or its recursive dependencies.
#![deny(unsafe_op_in_unsafe_fn)]
#![stable(feature = "alloc_module", since = "1.28.0")]
use core::intrinsics;
@ -158,7 +159,9 @@ unsafe impl AllocRef for System {
#[inline]
unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
if layout.size() != 0 {
GlobalAlloc::dealloc(self, ptr.as_ptr(), layout)
// SAFETY: The safety guarantees are explained in the documentation
// for the `GlobalAlloc` trait and its `dealloc` method.
unsafe { GlobalAlloc::dealloc(self, ptr.as_ptr(), layout) }
}
}
@ -184,16 +187,36 @@ unsafe impl AllocRef for System {
match placement {
ReallocPlacement::InPlace => Err(AllocErr),
ReallocPlacement::MayMove if layout.size() == 0 => {
let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
let new_layout =
// SAFETY: The new size and layout alignment guarantees
// are transferred to the caller (they come from parameters).
//
// See the preconditions for `Layout::from_size_align` to
// see what must be checked.
unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) };
self.alloc(new_layout, init)
}
ReallocPlacement::MayMove => {
// `realloc` probably checks for `new_size > size` or something similar.
intrinsics::assume(new_size > size);
let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size);
let memory =
MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size };
init.init_offset(memory, size);
// SAFETY:
//
// The safety guarantees are explained in the documentation
// for the `GlobalAlloc` trait and its `dealloc` method.
//
// `realloc` probably checks for `new_size > size` or something
// similar.
//
// For the guarantees about `init_offset`, see its documentation:
// `ptr` is assumed valid (and checked for non-NUL) and
// `memory.size` is set to `new_size` so the offset being `size`
// is valid.
let memory = unsafe {
intrinsics::assume(new_size > size);
let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size);
let memory =
MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size };
init.init_offset(memory, size);
memory
};
Ok(memory)
}
}
@ -220,14 +243,28 @@ unsafe impl AllocRef for System {
match placement {
ReallocPlacement::InPlace => Err(AllocErr),
ReallocPlacement::MayMove if new_size == 0 => {
self.dealloc(ptr, layout);
// SAFETY: see `GlobalAlloc::dealloc` for the guarantees that
// must be respected. `ptr` and `layout` are parameters and so
// those guarantees must be checked by the caller.
unsafe { self.dealloc(ptr, layout) };
Ok(MemoryBlock { ptr: layout.dangling(), size: 0 })
}
ReallocPlacement::MayMove => {
// `realloc` probably checks for `new_size < size` or something similar.
intrinsics::assume(new_size < size);
let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size);
Ok(MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size })
// SAFETY:
//
// See `GlobalAlloc::realloc` for more information about the
// guarantees expected by this method. `ptr`, `layout` and
// `new_size` are parameters and the responsibility for their
// correctness is left to the caller.
//
// `realloc` probably checks for `new_size < size` or something
// similar.
let memory = unsafe {
intrinsics::assume(new_size < size);
let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size);
MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size }
};
Ok(memory)
}
}
}
@ -300,13 +337,19 @@ pub mod __default_lib_allocator {
#[rustc_std_internal_symbol]
pub unsafe extern "C" fn __rdl_alloc(size: usize, align: usize) -> *mut u8 {
let layout = Layout::from_size_align_unchecked(size, align);
System.alloc(layout)
// SAFETY: see the guarantees expected by `Layout::from_size_align` and
// `GlobalAlloc::alloc`.
unsafe {
let layout = Layout::from_size_align_unchecked(size, align);
System.alloc(layout)
}
}
#[rustc_std_internal_symbol]
pub unsafe extern "C" fn __rdl_dealloc(ptr: *mut u8, size: usize, align: usize) {
System.dealloc(ptr, Layout::from_size_align_unchecked(size, align))
// SAFETY: see the guarantees expected by `Layout::from_size_align` and
// `GlobalAlloc::dealloc`.
unsafe { System.dealloc(ptr, Layout::from_size_align_unchecked(size, align)) }
}
#[rustc_std_internal_symbol]
@ -316,13 +359,21 @@ pub mod __default_lib_allocator {
align: usize,
new_size: usize,
) -> *mut u8 {
let old_layout = Layout::from_size_align_unchecked(old_size, align);
System.realloc(ptr, old_layout, new_size)
// SAFETY: see the guarantees expected by `Layout::from_size_align` and
// `GlobalAlloc::realloc`.
unsafe {
let old_layout = Layout::from_size_align_unchecked(old_size, align);
System.realloc(ptr, old_layout, new_size)
}
}
#[rustc_std_internal_symbol]
pub unsafe extern "C" fn __rdl_alloc_zeroed(size: usize, align: usize) -> *mut u8 {
let layout = Layout::from_size_align_unchecked(size, align);
System.alloc_zeroed(layout)
// SAFETY: see the guarantees expected by `Layout::from_size_align` and
// `GlobalAlloc::alloc_zeroed`.
unsafe {
let layout = Layout::from_size_align_unchecked(size, align);
System.alloc_zeroed(layout)
}
}
}
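
These changes follow from the newly added `#![deny(unsafe_op_in_unsafe_fn)]`: the body of an `unsafe fn` no longer acts as one big implicit `unsafe` block, so each unsafe operation gets its own `unsafe { ... }` with a `// SAFETY:` comment. A minimal standalone illustration of the pattern (not std code; assumes a compiler where the lint is available outside the standard library):

#![deny(unsafe_op_in_unsafe_fn)]

/// # Safety
/// `ptr` must be non-null, properly aligned, and point to a valid `u32`.
unsafe fn read_u32(ptr: *const u32) -> u32 {
    // Without the explicit block, the dereference would be rejected by the lint above.
    // SAFETY: the caller upholds the pointer requirements documented on this fn.
    unsafe { *ptr }
}

fn main() {
    let x = 7u32;
    // SAFETY: `&x` is a valid, aligned pointer to a live u32.
    let y = unsafe { read_u32(&x) };
    assert_eq!(y, 7);
}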

View file

@ -148,14 +148,11 @@ use crate::sys;
/// The easiest way to use `HashMap` with a custom key type is to derive [`Eq`] and [`Hash`].
/// We must also derive [`PartialEq`].
///
/// [`Eq`]: ../../std/cmp/trait.Eq.html
/// [`Hash`]: ../../std/hash/trait.Hash.html
/// [`PartialEq`]: ../../std/cmp/trait.PartialEq.html
/// [`RefCell`]: ../../std/cell/struct.RefCell.html
/// [`Cell`]: ../../std/cell/struct.Cell.html
/// [`default`]: #method.default
/// [`with_hasher`]: #method.with_hasher
/// [`with_capacity_and_hasher`]: #method.with_capacity_and_hasher
/// [`RefCell`]: crate::cell::RefCell
/// [`Cell`]: crate::cell::Cell
/// [`default`]: Default::default
/// [`with_hasher`]: Self::with_hasher
/// [`with_capacity_and_hasher`]: Self::with_capacity_and_hasher
/// [`fnv`]: https://crates.io/crates/fnv
///
/// ```
@ -264,8 +261,6 @@ impl<K, V, S> HashMap<K, V, S> {
/// let mut map = HashMap::with_hasher(s);
/// map.insert(1, 2);
/// ```
///
/// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
#[inline]
#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
pub fn with_hasher(hash_builder: S) -> HashMap<K, V, S> {
@ -296,8 +291,6 @@ impl<K, V, S> HashMap<K, V, S> {
/// let mut map = HashMap::with_capacity_and_hasher(10, s);
/// map.insert(1, 2);
/// ```
///
/// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
#[inline]
#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> HashMap<K, V, S> {
@ -524,8 +517,6 @@ impl<K, V, S> HashMap<K, V, S> {
/// Returns a reference to the map's [`BuildHasher`].
///
/// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
///
/// # Examples
///
/// ```
@ -556,8 +547,6 @@ where
///
/// Panics if the new allocation size overflows [`usize`].
///
/// [`usize`]: ../../std/primitive.usize.html
///
/// # Examples
///
/// ```
@ -676,9 +665,6 @@ where
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: ../../std/cmp/trait.Eq.html
/// [`Hash`]: ../../std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
@ -705,9 +691,6 @@ where
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: ../../std/cmp/trait.Eq.html
/// [`Hash`]: ../../std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
@ -734,9 +717,6 @@ where
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: ../../std/cmp/trait.Eq.html
/// [`Hash`]: ../../std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
@ -763,9 +743,6 @@ where
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: ../../std/cmp/trait.Eq.html
/// [`Hash`]: ../../std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
@ -797,8 +774,7 @@ where
/// types that can be `==` without being identical. See the [module-level
/// documentation] for more.
///
/// [`None`]: ../../std/option/enum.Option.html#variant.None
/// [module-level documentation]: index.html#insert-and-complex-keys
/// [module-level documentation]: crate::collections#insert-and-complex-keys
///
/// # Examples
///
@ -826,9 +802,6 @@ where
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: ../../std/cmp/trait.Eq.html
/// [`Hash`]: ../../std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
@ -856,9 +829,6 @@ where
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: ../../std/cmp/trait.Eq.html
/// [`Hash`]: ../../std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
@ -1040,8 +1010,7 @@ where
/// This `struct` is created by the [`iter`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`iter`]: struct.HashMap.html#method.iter
/// [`HashMap`]: struct.HashMap.html
/// [`iter`]: HashMap::iter
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, K: 'a, V: 'a> {
base: base::Iter<'a, K, V>,
@ -1068,8 +1037,7 @@ impl<K: Debug, V: Debug> fmt::Debug for Iter<'_, K, V> {
/// This `struct` is created by the [`iter_mut`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`iter_mut`]: struct.HashMap.html#method.iter_mut
/// [`HashMap`]: struct.HashMap.html
/// [`iter_mut`]: HashMap::iter_mut
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IterMut<'a, K: 'a, V: 'a> {
base: base::IterMut<'a, K, V>,
@ -1088,8 +1056,7 @@ impl<'a, K, V> IterMut<'a, K, V> {
/// This `struct` is created by the [`into_iter`] method on [`HashMap`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
/// [`into_iter`]: struct.HashMap.html#method.into_iter
/// [`HashMap`]: struct.HashMap.html
/// [`into_iter`]: IntoIterator::into_iter
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<K, V> {
base: base::IntoIter<K, V>,
@ -1108,8 +1075,7 @@ impl<K, V> IntoIter<K, V> {
/// This `struct` is created by the [`keys`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`keys`]: struct.HashMap.html#method.keys
/// [`HashMap`]: struct.HashMap.html
/// [`keys`]: HashMap::keys
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Keys<'a, K: 'a, V: 'a> {
inner: Iter<'a, K, V>,
@ -1136,8 +1102,7 @@ impl<K: Debug, V> fmt::Debug for Keys<'_, K, V> {
/// This `struct` is created by the [`values`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`values`]: struct.HashMap.html#method.values
/// [`HashMap`]: struct.HashMap.html
/// [`values`]: HashMap::values
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Values<'a, K: 'a, V: 'a> {
inner: Iter<'a, K, V>,
@ -1164,8 +1129,7 @@ impl<K, V: Debug> fmt::Debug for Values<'_, K, V> {
/// This `struct` is created by the [`drain`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`drain`]: struct.HashMap.html#method.drain
/// [`HashMap`]: struct.HashMap.html
/// [`drain`]: HashMap::drain
#[stable(feature = "drain", since = "1.6.0")]
pub struct Drain<'a, K: 'a, V: 'a> {
base: base::Drain<'a, K, V>,
@ -1184,8 +1148,7 @@ impl<'a, K, V> Drain<'a, K, V> {
/// This `struct` is created by the [`values_mut`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`values_mut`]: struct.HashMap.html#method.values_mut
/// [`HashMap`]: struct.HashMap.html
/// [`values_mut`]: HashMap::values_mut
#[stable(feature = "map_values_mut", since = "1.10.0")]
pub struct ValuesMut<'a, K: 'a, V: 'a> {
inner: IterMut<'a, K, V>,
@ -1195,7 +1158,7 @@ pub struct ValuesMut<'a, K: 'a, V: 'a> {
///
/// See the [`HashMap::raw_entry_mut`] docs for usage examples.
///
/// [`HashMap::raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
/// [`HashMap::raw_entry_mut`]: HashMap::raw_entry_mut
#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub struct RawEntryBuilderMut<'a, K: 'a, V: 'a, S: 'a> {
@ -1209,9 +1172,8 @@ pub struct RawEntryBuilderMut<'a, K: 'a, V: 'a, S: 'a> {
/// This `enum` is constructed through the [`raw_entry_mut`] method on [`HashMap`],
/// then calling one of the methods of that [`RawEntryBuilderMut`].
///
/// [`HashMap`]: struct.HashMap.html
/// [`Entry`]: enum.Entry.html
/// [`raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
/// [`raw_entry_mut`]: HashMap::raw_entry_mut
/// [`RawEntryBuilderMut`]: struct.RawEntryBuilderMut.html
#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub enum RawEntryMut<'a, K: 'a, V: 'a, S: 'a> {
@ -1223,8 +1185,6 @@ pub enum RawEntryMut<'a, K: 'a, V: 'a, S: 'a> {
/// A view into an occupied entry in a `HashMap`.
/// It is part of the [`RawEntryMut`] enum.
///
/// [`RawEntryMut`]: enum.RawEntryMut.html
#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub struct RawOccupiedEntryMut<'a, K: 'a, V: 'a> {
base: base::RawOccupiedEntryMut<'a, K, V>,
@ -1232,8 +1192,6 @@ pub struct RawOccupiedEntryMut<'a, K: 'a, V: 'a> {
/// A view into a vacant entry in a `HashMap`.
/// It is part of the [`RawEntryMut`] enum.
///
/// [`RawEntryMut`]: enum.RawEntryMut.html
#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub struct RawVacantEntryMut<'a, K: 'a, V: 'a, S: 'a> {
base: base::RawVacantEntryMut<'a, K, V, S>,
@ -1243,7 +1201,7 @@ pub struct RawVacantEntryMut<'a, K: 'a, V: 'a, S: 'a> {
///
/// See the [`HashMap::raw_entry`] docs for usage examples.
///
/// [`HashMap::raw_entry`]: struct.HashMap.html#method.raw_entry
/// [`HashMap::raw_entry`]: HashMap::raw_entry
#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub struct RawEntryBuilder<'a, K: 'a, V: 'a, S: 'a> {
map: &'a HashMap<K, V, S>,
@ -1597,8 +1555,7 @@ impl<K, V, S> Debug for RawEntryBuilder<'_, K, V, S> {
///
/// This `enum` is constructed from the [`entry`] method on [`HashMap`].
///
/// [`HashMap`]: struct.HashMap.html
/// [`entry`]: struct.HashMap.html#method.entry
/// [`entry`]: HashMap::entry
#[stable(feature = "rust1", since = "1.0.0")]
pub enum Entry<'a, K: 'a, V: 'a> {
/// An occupied entry.
@ -2156,7 +2113,7 @@ impl<'a, K, V> OccupiedEntry<'a, K, V> {
/// If you need a reference to the `OccupiedEntry` which may outlive the
/// destruction of the `Entry` value, see [`into_mut`].
///
/// [`into_mut`]: #method.into_mut
/// [`into_mut`]: Self::into_mut
///
/// # Examples
///
@ -2189,7 +2146,7 @@ impl<'a, K, V> OccupiedEntry<'a, K, V> {
///
/// If you need multiple references to the `OccupiedEntry`, see [`get_mut`].
///
/// [`get_mut`]: #method.get_mut
/// [`get_mut`]: Self::get_mut
///
/// # Examples
///
@ -2475,9 +2432,6 @@ where
/// [`Hasher`], but the hashers created by two different `RandomState`
/// instances are unlikely to produce the same result for the same values.
///
/// [`HashMap`]: struct.HashMap.html
/// [`Hasher`]: ../../hash/trait.Hasher.html
///
/// # Examples
///
/// ```
@ -2547,9 +2501,6 @@ impl BuildHasher for RandomState {
///
/// The internal algorithm is not specified, and so it and its hashes should
/// not be relied upon over releases.
///
/// [`RandomState`]: struct.RandomState.html
/// [`Hasher`]: ../../hash/trait.Hasher.html
#[stable(feature = "hashmap_default_hasher", since = "1.13.0")]
#[allow(deprecated)]
#[derive(Clone, Debug)]

View file

@ -98,12 +98,8 @@ use super::map::{self, HashMap, Keys, RandomState};
/// // use the values stored in the set
/// ```
///
/// [`Cell`]: ../../std/cell/struct.Cell.html
/// [`Eq`]: ../../std/cmp/trait.Eq.html
/// [`Hash`]: ../../std/hash/trait.Hash.html
/// [`HashMap`]: struct.HashMap.html
/// [`PartialEq`]: ../../std/cmp/trait.PartialEq.html
/// [`RefCell`]: ../../std/cell/struct.RefCell.html
/// [`RefCell`]: crate::cell::RefCell
/// [`Cell`]: crate::cell::Cell
#[derive(Clone)]
#[cfg_attr(not(test), rustc_diagnostic_item = "hashset_type")]
#[stable(feature = "rust1", since = "1.0.0")]
@ -286,8 +282,6 @@ impl<T, S> HashSet<T, S> {
/// let mut set = HashSet::with_hasher(s);
/// set.insert(2);
/// ```
///
/// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
#[inline]
#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
pub fn with_hasher(hasher: S) -> HashSet<T, S> {
@ -318,8 +312,6 @@ impl<T, S> HashSet<T, S> {
/// let mut set = HashSet::with_capacity_and_hasher(10, s);
/// set.insert(1);
/// ```
///
/// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
#[inline]
#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> HashSet<T, S> {
@ -328,8 +320,6 @@ impl<T, S> HashSet<T, S> {
/// Returns a reference to the set's [`BuildHasher`].
///
/// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
///
/// # Examples
///
/// ```
@ -577,9 +567,6 @@ where
/// assert_eq!(set.contains(&1), true);
/// assert_eq!(set.contains(&4), false);
/// ```
///
/// [`Eq`]: ../../std/cmp/trait.Eq.html
/// [`Hash`]: ../../std/hash/trait.Hash.html
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn contains<Q: ?Sized>(&self, value: &Q) -> bool
@ -605,9 +592,6 @@ where
/// assert_eq!(set.get(&2), Some(&2));
/// assert_eq!(set.get(&4), None);
/// ```
///
/// [`Eq`]: ../../std/cmp/trait.Eq.html
/// [`Hash`]: ../../std/hash/trait.Hash.html
#[inline]
#[stable(feature = "set_recovery", since = "1.9.0")]
pub fn get<Q: ?Sized>(&self, value: &Q) -> Option<&T>
@ -849,9 +833,6 @@ where
/// assert_eq!(set.remove(&2), true);
/// assert_eq!(set.remove(&2), false);
/// ```
///
/// [`Eq`]: ../../std/cmp/trait.Eq.html
/// [`Hash`]: ../../std/hash/trait.Hash.html
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn remove<Q: ?Sized>(&mut self, value: &Q) -> bool
@ -877,9 +858,6 @@ where
/// assert_eq!(set.take(&2), Some(2));
/// assert_eq!(set.take(&2), None);
/// ```
///
/// [`Eq`]: ../../std/cmp/trait.Eq.html
/// [`Hash`]: ../../std/hash/trait.Hash.html
#[inline]
#[stable(feature = "set_recovery", since = "1.9.0")]
pub fn take<Q: ?Sized>(&mut self, value: &Q) -> Option<T>
@ -1153,8 +1131,7 @@ where
/// This `struct` is created by the [`iter`] method on [`HashSet`].
/// See its documentation for more.
///
/// [`HashSet`]: struct.HashSet.html
/// [`iter`]: struct.HashSet.html#method.iter
/// [`iter`]: HashSet::iter
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, K: 'a> {
iter: Keys<'a, K, ()>,
@ -1165,8 +1142,7 @@ pub struct Iter<'a, K: 'a> {
/// This `struct` is created by the [`into_iter`] method on [`HashSet`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
/// [`HashSet`]: struct.HashSet.html
/// [`into_iter`]: struct.HashSet.html#method.into_iter
/// [`into_iter`]: IntoIterator::into_iter
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<K> {
iter: map::IntoIter<K, ()>,
@ -1177,8 +1153,7 @@ pub struct IntoIter<K> {
/// This `struct` is created by the [`drain`] method on [`HashSet`].
/// See its documentation for more.
///
/// [`HashSet`]: struct.HashSet.html
/// [`drain`]: struct.HashSet.html#method.drain
/// [`drain`]: HashSet::drain
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Drain<'a, K: 'a> {
iter: map::Drain<'a, K, ()>,
@ -1189,8 +1164,7 @@ pub struct Drain<'a, K: 'a> {
/// This `struct` is created by the [`intersection`] method on [`HashSet`].
/// See its documentation for more.
///
/// [`HashSet`]: struct.HashSet.html
/// [`intersection`]: struct.HashSet.html#method.intersection
/// [`intersection`]: HashSet::intersection
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Intersection<'a, T: 'a, S: 'a> {
// iterator of the first set
@ -1204,8 +1178,7 @@ pub struct Intersection<'a, T: 'a, S: 'a> {
/// This `struct` is created by the [`difference`] method on [`HashSet`].
/// See its documentation for more.
///
/// [`HashSet`]: struct.HashSet.html
/// [`difference`]: struct.HashSet.html#method.difference
/// [`difference`]: HashSet::difference
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Difference<'a, T: 'a, S: 'a> {
// iterator of the first set
@ -1219,8 +1192,7 @@ pub struct Difference<'a, T: 'a, S: 'a> {
/// This `struct` is created by the [`symmetric_difference`] method on
/// [`HashSet`]. See its documentation for more.
///
/// [`HashSet`]: struct.HashSet.html
/// [`symmetric_difference`]: struct.HashSet.html#method.symmetric_difference
/// [`symmetric_difference`]: HashSet::symmetric_difference
#[stable(feature = "rust1", since = "1.0.0")]
pub struct SymmetricDifference<'a, T: 'a, S: 'a> {
iter: Chain<Difference<'a, T, S>, Difference<'a, T, S>>,
@ -1231,8 +1203,7 @@ pub struct SymmetricDifference<'a, T: 'a, S: 'a> {
/// This `struct` is created by the [`union`] method on [`HashSet`].
/// See its documentation for more.
///
/// [`HashSet`]: struct.HashSet.html
/// [`union`]: struct.HashSet.html#method.union
/// [`union`]: HashSet::union
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Union<'a, T: 'a, S: 'a> {
iter: Chain<Iter<'a, T>, Difference<'a, T, S>>,

View file

@ -1551,6 +1551,27 @@ impl ops::Index<ops::RangeFull> for CString {
}
}
#[stable(feature = "cstr_range_from", since = "1.47.0")]
impl ops::Index<ops::RangeFrom<usize>> for CStr {
type Output = CStr;
fn index(&self, index: ops::RangeFrom<usize>) -> &CStr {
let bytes = self.to_bytes_with_nul();
// we need to manually check the starting index to account for the null
// byte, since otherwise we could get an empty string that doesn't end
// in a null.
if index.start < bytes.len() {
unsafe { CStr::from_bytes_with_nul_unchecked(&bytes[index.start..]) }
} else {
panic!(
"index out of bounds: the len is {} but the index is {}",
bytes.len(),
index.start
);
}
}
}
#[stable(feature = "cstring_asref", since = "1.7.0")]
impl AsRef<CStr> for CStr {
#[inline]
@ -1747,4 +1768,21 @@ mod tests {
assert_eq!(CSTR.to_str().unwrap(), "Hello, world!");
}
#[test]
fn cstr_index_from() {
let original = b"Hello, world!\0";
let cstr = CStr::from_bytes_with_nul(original).unwrap();
let result = CStr::from_bytes_with_nul(&original[7..]).unwrap();
assert_eq!(&cstr[7..], result);
}
#[test]
#[should_panic]
fn cstr_index_from_empty() {
let original = b"Hello, world!\0";
let cstr = CStr::from_bytes_with_nul(original).unwrap();
let _ = &cstr[original.len()..];
}
}
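
A short usage sketch of the new `Index<ops::RangeFrom<usize>>` impl for `CStr` added above (illustrative only; it simply exercises the indexing behavior that the tests above also cover):

```rust
use std::ffi::CStr;

fn main() {
    let cstr = CStr::from_bytes_with_nul(b"Hello, world!\0").unwrap();
    // Slicing from index 7 keeps the trailing nul byte, so the result is still a valid C string.
    let tail: &CStr = &cstr[7..];
    assert_eq!(tail.to_str().unwrap(), "world!");
}
```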

View file

@ -238,25 +238,14 @@
//! contract. The implementations of many of these functions are subject to change over
//! time and may call fewer or more syscalls/library functions.
//!
//! [`Read`]: trait.Read.html
//! [`Write`]: trait.Write.html
//! [`Seek`]: trait.Seek.html
//! [`BufRead`]: trait.BufRead.html
//! [`File`]: ../fs/struct.File.html
//! [`TcpStream`]: ../net/struct.TcpStream.html
//! [`Vec<T>`]: ../vec/struct.Vec.html
//! [`BufReader`]: struct.BufReader.html
//! [`BufWriter`]: struct.BufWriter.html
//! [`Write::write`]: trait.Write.html#tymethod.write
//! [`io::stdout`]: fn.stdout.html
//! [`println!`]: ../macro.println.html
//! [`Lines`]: struct.Lines.html
//! [`io::Result`]: type.Result.html
//! [`File`]: crate::fs::File
//! [`TcpStream`]: crate::net::TcpStream
//! [`Vec<T>`]: crate::vec::Vec
//! [`io::stdout`]: stdout
//! [`io::Result`]: crate::io::Result
//! [`?` operator]: ../../book/appendix-02-operators.html
//! [`Read::read`]: trait.Read.html#tymethod.read
//! [`Result`]: ../result/enum.Result.html
//! [`.unwrap()`]: ../result/enum.Result.html#method.unwrap
// ignore-tidy-filelength
//! [`Result`]: crate::result::Result
//! [`.unwrap()`]: crate::result::Result::unwrap
#![stable(feature = "rust1", since = "1.0.0")]
@ -491,12 +480,10 @@ where
/// }
/// ```
///
/// [`read()`]: trait.Read.html#tymethod.read
/// [`std::io`]: ../../std/io/index.html
/// [`File`]: ../fs/struct.File.html
/// [`BufRead`]: trait.BufRead.html
/// [`BufReader`]: struct.BufReader.html
/// [`&str`]: ../../std/primitive.str.html
/// [`read()`]: Read::read
/// [`&str`]: str
/// [`std::io`]: self
/// [`File`]: crate::fs::File
/// [slice]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
#[doc(spotlight)]
@ -535,7 +522,7 @@ pub trait Read {
/// before calling `read`. Calling `read` with an uninitialized `buf` (of the kind one
/// obtains via [`MaybeUninit<T>`]) is not safe, and can lead to undefined behavior.
///
/// [`MaybeUninit<T>`]: ../mem/union.MaybeUninit.html
/// [`MaybeUninit<T>`]: crate::mem::MaybeUninit
///
/// # Errors
///
@ -550,10 +537,8 @@ pub trait Read {
///
/// [`File`]s implement `Read`:
///
/// [`Err`]: ../../std/result/enum.Result.html#variant.Err
/// [`Ok(n)`]: ../../std/result/enum.Result.html#variant.Ok
/// [`ErrorKind::Interrupted`]: ../../std/io/enum.ErrorKind.html#variant.Interrupted
/// [`File`]: ../fs/struct.File.html
/// [`Ok(n)`]: Ok
/// [`File`]: crate::fs::File
///
/// ```no_run
/// use std::io;
@ -620,9 +605,6 @@ pub trait Read {
/// This method is unsafe because a `Read`er could otherwise return a
/// non-zeroing `Initializer` from another `Read` type without an `unsafe`
/// block.
///
/// [`Initializer::nop()`]: ../../std/io/struct.Initializer.html#method.nop
/// [`Initializer`]: ../../std/io/struct.Initializer.html
#[unstable(feature = "read_initializer", issue = "42788")]
#[inline]
unsafe fn initializer(&self) -> Initializer {
@ -652,10 +634,9 @@ pub trait Read {
///
/// [`File`]s implement `Read`:
///
/// [`read()`]: trait.Read.html#tymethod.read
/// [`Ok(0)`]: ../../std/result/enum.Result.html#variant.Ok
/// [`ErrorKind::Interrupted`]: ../../std/io/enum.ErrorKind.html#variant.Interrupted
/// [`File`]: ../fs/struct.File.html
/// [`read()`]: Read::read
/// [`Ok(0)`]: Ok
/// [`File`]: crate::fs::File
///
/// ```no_run
/// use std::io;
@ -675,7 +656,7 @@ pub trait Read {
/// (See also the [`std::fs::read`] convenience function for reading from a
/// file.)
///
/// [`std::fs::read`]: ../fs/fn.read.html
/// [`std::fs::read`]: crate::fs::read
#[stable(feature = "rust1", since = "1.0.0")]
fn read_to_end(&mut self, buf: &mut Vec<u8>) -> Result<usize> {
read_to_end(self, buf)
@ -693,13 +674,13 @@ pub trait Read {
///
/// See [`read_to_end`][readtoend] for other error semantics.
///
/// [readtoend]: #method.read_to_end
/// [readtoend]: Self::read_to_end
///
/// # Examples
///
/// [`File`][file]s implement `Read`:
///
/// [file]: ../fs/struct.File.html
/// [file]: crate::fs::File
///
/// ```no_run
/// use std::io;
@ -718,7 +699,7 @@ pub trait Read {
/// (See also the [`std::fs::read_to_string`] convenience function for
/// reading from a file.)
///
/// [`std::fs::read_to_string`]: ../fs/fn.read_to_string.html
/// [`std::fs::read_to_string`]: crate::fs::read_to_string
#[stable(feature = "rust1", since = "1.0.0")]
fn read_to_string(&mut self, buf: &mut String) -> Result<usize> {
// Note that we do *not* call `.read_to_end()` here. We are passing
@ -767,9 +748,7 @@ pub trait Read {
/// [`File`]s implement `Read`:
///
/// [`read`]: Read::read
/// [`File`]: ../fs/struct.File.html
/// [`ErrorKind::Interrupted`]: ../../std/io/enum.ErrorKind.html#variant.Interrupted
/// [`ErrorKind::UnexpectedEof`]: ../../std/io/enum.ErrorKind.html#variant.UnexpectedEof
/// [`File`]: crate::fs::File
///
/// ```no_run
/// use std::io;
@ -814,7 +793,7 @@ pub trait Read {
///
/// [`File`][file]s implement `Read`:
///
/// [file]: ../fs/struct.File.html
/// [file]: crate::fs::File
///
/// ```no_run
/// use std::io;
@ -858,14 +837,10 @@ pub trait Read {
///
/// [`File`][file]s implement `Read`:
///
/// [file]: ../fs/struct.File.html
/// [`Iterator`]: ../../std/iter/trait.Iterator.html
/// [`Result`]: ../../std/result/enum.Result.html
/// [`io::Error`]: ../../std/io/struct.Error.html
/// [`u8`]: ../../std/primitive.u8.html
/// [`Ok`]: ../../std/result/enum.Result.html#variant.Ok
/// [`Err`]: ../../std/result/enum.Result.html#variant.Err
/// [`None`]: ../../std/option/enum.Option.html#variant.None
/// [file]: crate::fs::File
/// [`Iterator`]: crate::iter::Iterator
/// [`Result`]: crate::result::Result
/// [`io::Error`]: self::Error
///
/// ```no_run
/// use std::io;
@ -899,7 +874,7 @@ pub trait Read {
///
/// [`File`][file]s implement `Read`:
///
/// [file]: ../fs/struct.File.html
/// [file]: crate::fs::File
///
/// ```no_run
/// use std::io;
@ -938,9 +913,9 @@ pub trait Read {
///
/// [`File`]s implement `Read`:
///
/// [`File`]: ../fs/struct.File.html
/// [`Ok(0)`]: ../../std/result/enum.Result.html#variant.Ok
/// [`read()`]: trait.Read.html#tymethod.read
/// [`File`]: crate::fs::File
/// [`Ok(0)`]: Ok
/// [`read()`]: Read::read
///
/// ```no_run
/// use std::io;
@ -1236,8 +1211,8 @@ impl Initializer {
/// throughout [`std::io`] take and provide types which implement the `Write`
/// trait.
///
/// [`write`]: #tymethod.write
/// [`flush`]: #tymethod.flush
/// [`write`]: Self::write
/// [`flush`]: Self::flush
/// [`std::io`]: index.html
///
/// # Examples
@ -1263,7 +1238,7 @@ impl Initializer {
/// The trait also provides convenience methods like [`write_all`], which calls
/// `write` in a loop until its entire input has been written.
///
/// [`write_all`]: #method.write_all
/// [`write_all`]: Self::write_all
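///
/// A minimal sketch of that convenience (added here for illustration, not part
/// of the original text):
///
/// ```no_run
/// use std::io::Write;
///
/// fn main() -> std::io::Result<()> {
///     let mut file = std::fs::File::create("out.txt")?;
///     // `write_all` keeps calling `write` until every byte is written or an error occurs.
///     file.write_all(b"hello")?;
///     Ok(())
/// }
/// ```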
#[stable(feature = "rust1", since = "1.0.0")]
#[doc(spotlight)]
pub trait Write {
@ -1295,10 +1270,6 @@ pub trait Write {
/// An error of the [`ErrorKind::Interrupted`] kind is non-fatal and the
/// write operation should be retried if there is nothing else to do.
///
/// [`Err`]: ../../std/result/enum.Result.html#variant.Err
/// [`Ok(n)`]: ../../std/result/enum.Result.html#variant.Ok
/// [`ErrorKind::Interrupted`]: ../../std/io/enum.ErrorKind.html#variant.Interrupted
///
/// # Examples
///
/// ```no_run
@ -1384,8 +1355,7 @@ pub trait Write {
/// This function will return the first error of
/// non-[`ErrorKind::Interrupted`] kind that [`write`] returns.
///
/// [`ErrorKind::Interrupted`]: ../../std/io/enum.ErrorKind.html#variant.Interrupted
/// [`write`]: #tymethod.write
/// [`write`]: Self::write
///
/// # Examples
///
@ -1426,8 +1396,7 @@ pub trait Write {
///
/// If the buffer contains no data, this will never call [`write_vectored`].
///
/// [`write_vectored`]: #method.write_vectored
/// [`ErrorKind::Interrupted`]: ../../std/io/enum.ErrorKind.html#variant.Interrupted
/// [`write_vectored`]: Self::write_vectored
///
/// # Notes
///
@ -1483,19 +1452,16 @@ pub trait Write {
/// encountered.
///
/// This method is primarily used to interface with the
/// [`format_args!`][formatargs] macro, but it is rare that this should
/// explicitly be called. The [`write!`][write] macro should be favored to
/// [`format_args!()`] macro, but it is rare that this should
/// explicitly be called. The [`write!()`] macro should be favored to
/// invoke this method instead.
///
/// [formatargs]: ../macro.format_args.html
/// [write]: ../macro.write.html
///
/// This function internally uses the [`write_all`][writeall] method on
/// this trait and hence will continuously write data so long as no errors
/// are received. This also means that partial writes are not indicated in
/// this signature.
///
/// [writeall]: #method.write_all
/// [writeall]: Self::write_all
///
/// # Errors
///
@ -1592,7 +1558,7 @@ pub trait Write {
///
/// [`File`][file]s implement `Seek`:
///
/// [file]: ../fs/struct.File.html
/// [file]: crate::fs::File
///
/// ```no_run
/// use std::io;
@ -1792,9 +1758,9 @@ fn read_until<R: BufRead + ?Sized>(r: &mut R, delim: u8, buf: &mut Vec<u8>) -> R
/// [`BufReader`] to the rescue!
///
/// [`BufReader`]: struct.BufReader.html
/// [`File`]: ../fs/struct.File.html
/// [`read_line`]: #method.read_line
/// [`lines`]: #method.lines
/// [`File`]: crate::fs::File
/// [`read_line`]: Self::read_line
/// [`lines`]: Self::lines
/// [`Read`]: trait.Read.html
///
/// ```no_run
@ -1826,7 +1792,7 @@ pub trait BufRead: Read {
/// be called with the number of bytes that are consumed from this buffer to
/// ensure that the bytes are never returned twice.
///
/// [`consume`]: #tymethod.consume
/// [`consume`]: Self::consume
///
/// An empty buffer returned indicates that the stream has reached EOF.
///
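/// A minimal sketch of pairing `fill_buf` with `consume` (added here for
/// illustration, not part of the original text):
///
/// ```no_run
/// use std::io::{self, BufRead};
///
/// fn main() -> io::Result<()> {
///     let stdin = io::stdin();
///     let mut handle = stdin.lock();
///     let buffer = handle.fill_buf()?;
///     let used = buffer.len();
///     // Once the bytes have been inspected, report how many were actually consumed.
///     handle.consume(used);
///     Ok(())
/// }
/// ```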
@ -1876,7 +1842,7 @@ pub trait BufRead: Read {
/// Since `consume()` is meant to be used with [`fill_buf`],
/// that method's example includes an example of `consume()`.
///
/// [`fill_buf`]: #tymethod.fill_buf
/// [`fill_buf`]: Self::fill_buf
#[stable(feature = "rust1", since = "1.0.0")]
fn consume(&mut self, amt: usize);
@ -1900,7 +1866,7 @@ pub trait BufRead: Read {
/// If an I/O error is encountered then all bytes read so far will be
/// present in `buf` and its length will have been adjusted appropriately.
///
/// [`fill_buf`]: #tymethod.fill_buf
/// [`fill_buf`]: Self::fill_buf
/// [`ErrorKind::Interrupted`]: enum.ErrorKind.html#variant.Interrupted
///
/// # Examples
@ -1965,7 +1931,7 @@ pub trait BufRead: Read {
/// error is encountered then `buf` may contain some bytes already read in
/// the event that all data read so far was valid UTF-8.
///
/// [`read_until`]: #method.read_until
/// [`read_until`]: Self::read_until
///
/// # Examples
///
@ -2018,9 +1984,9 @@ pub trait BufRead: Read {
/// This function will yield errors whenever [`read_until`] would have
/// also yielded an error.
///
/// [`io::Result`]: type.Result.html
/// [`Vec<u8>`]: ../vec/struct.Vec.html
/// [`read_until`]: #method.read_until
/// [`io::Result`]: self::Result
/// [`Vec<u8>`]: crate::vec::Vec
/// [`read_until`]: Self::read_until
///
/// # Examples
///
@ -2055,8 +2021,7 @@ pub trait BufRead: Read {
/// [`io::Result`]`<`[`String`]`>`. Each string returned will *not* have a newline
/// byte (the 0xA byte) or CRLF (0xD, 0xA bytes) at the end.
///
/// [`io::Result`]: type.Result.html
/// [`String`]: ../string/struct.String.html
/// [`io::Result`]: self::Result
///
/// # Examples
///
@ -2064,8 +2029,6 @@ pub trait BufRead: Read {
/// this example, we use [`Cursor`] to iterate over all the lines in a byte
/// slice.
///
/// [`Cursor`]: struct.Cursor.html
///
/// ```
/// use std::io::{self, BufRead};
///
@ -2256,8 +2219,6 @@ impl<T> Take<T> {
/// This instance may reach `EOF` after reading fewer bytes than indicated by
/// this method if the underlying [`Read`] instance reaches EOF.
///
/// [`Read`]: ../../std/io/trait.Read.html
///
/// # Examples
///
/// ```no_run

View file

@ -1497,11 +1497,188 @@ mod super_keyword {}
#[doc(keyword = "trait")]
//
/// A common interface for a class of types.
/// A common interface for a group of types.
///
/// The documentation for this keyword is [not yet complete]. Pull requests welcome!
/// A `trait` is like an interface that data types can implement. When a type
/// implements a trait it can be treated abstractly as that trait using generics
/// or trait objects.
///
/// [not yet complete]: https://github.com/rust-lang/rust/issues/34601
/// Traits can be made up of three varieties of associated items:
///
/// - functions and methods
/// - types
/// - constants
///
/// Traits may also contain additional type parameters. Those type parameters
/// or the trait itself can be constrained by other traits.
///
/// Traits can serve as markers or carry other logical semantics that
/// aren't expressed through their items. When a type implements that
/// trait it is promising to uphold its contract. [`Send`] and [`Sync`] are two
/// such marker traits present in the standard library.
///
/// See the [Reference][Ref-Traits] for a lot more information on traits.
///
/// # Examples
///
/// Traits are declared using the `trait` keyword. Types can implement them
/// using [`impl`] `Trait` [`for`] `Type`:
///
/// ```rust
/// trait Zero {
/// const ZERO: Self;
/// fn is_zero(&self) -> bool;
/// }
///
/// impl Zero for i32 {
/// const ZERO: Self = 0;
///
/// fn is_zero(&self) -> bool {
/// *self == Self::ZERO
/// }
/// }
///
/// assert_eq!(i32::ZERO, 0);
/// assert!(i32::ZERO.is_zero());
/// assert!(!4.is_zero());
/// ```
///
/// With an associated type:
///
/// ```rust
/// trait Builder {
/// type Built;
///
/// fn build(&self) -> Self::Built;
/// }
/// ```
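///
/// For instance, a type could implement it like this (a sketch added for
/// illustration, not part of the original text):
///
/// ```rust
/// # trait Builder { type Built; fn build(&self) -> Self::Built; }
/// struct Seven;
///
/// impl Builder for Seven {
///     type Built = u32;
///
///     fn build(&self) -> Self::Built {
///         7
///     }
/// }
/// ```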
///
/// Traits can be generic, with constraints or without:
///
/// ```rust
/// trait MaybeFrom<T> {
/// fn maybe_from(value: T) -> Option<Self>
/// where
/// Self: Sized;
/// }
/// ```
///
/// Traits can build upon the requirements of other traits. In the example
/// below `Iterator` is a **supertrait** and `ThreeIterator` is a **subtrait**:
///
/// ```rust
/// trait ThreeIterator: std::iter::Iterator {
/// fn next_three(&mut self) -> Option<[Self::Item; 3]>;
/// }
/// ```
///
/// Traits can be used in functions, as parameters:
///
/// ```rust
/// # #![allow(dead_code)]
/// fn debug_iter<I: Iterator>(it: I) where I::Item: std::fmt::Debug {
/// for elem in it {
/// println!("{:#?}", elem);
/// }
/// }
///
/// // u8_len_1, u8_len_2 and u8_len_3 are equivalent
///
/// fn u8_len_1(val: impl Into<Vec<u8>>) -> usize {
/// val.into().len()
/// }
///
/// fn u8_len_2<T: Into<Vec<u8>>>(val: T) -> usize {
/// val.into().len()
/// }
///
/// fn u8_len_3<T>(val: T) -> usize
/// where
/// T: Into<Vec<u8>>,
/// {
/// val.into().len()
/// }
/// ```
///
/// Or as return types:
///
/// ```rust
/// # #![allow(dead_code)]
/// fn from_zero_to(v: u8) -> impl Iterator<Item = u8> {
/// (0..v).into_iter()
/// }
/// ```
///
/// The use of the [`impl`] keyword in this position allows the function writer
/// to hide the concrete type as an implementation detail which can change
/// without breaking users' code.
///
/// # Trait objects
///
/// A *trait object* is an opaque value of another type that implements a set of
/// traits. A trait object implements all specified traits as well as their
/// supertraits (if any).
///
/// The syntax is the following: `dyn BaseTrait + AutoTrait1 + ... AutoTraitN`.
/// Only one `BaseTrait` can be used so this will not compile:
///
/// ```rust,compile_fail,E0225
/// trait A {}
/// trait B {}
///
/// let _: Box<dyn A + B>;
/// ```
///
/// Neither will this, which is a syntax error:
///
/// ```rust,compile_fail
/// trait A {}
/// trait B {}
///
/// let _: Box<dyn A + dyn B>;
/// ```
///
/// On the other hand, this is correct:
///
/// ```rust
/// trait A {}
///
/// let _: Box<dyn A + Send + Sync>;
/// ```
///
/// The [Reference][Ref-Trait-Objects] has more information about trait objects,
/// their limitations and the differences between editions.
///
/// # Unsafe traits
///
/// Some traits may be unsafe to implement. The [`unsafe`] keyword in
/// front of the trait's declaration marks this:
///
/// ```rust
/// unsafe trait UnsafeTrait {}
///
/// unsafe impl UnsafeTrait for i32 {}
/// ```
///
/// # Differences between the 2015 and 2018 editions
///
/// In the 2015 edition, parameter patterns were not needed for trait functions:
///
/// ```rust,edition2015
/// trait Tr {
/// fn f(i32);
/// }
/// ```
///
/// This behavior is no longer valid in edition 2018.
///
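/// In the 2018 edition each parameter must be written with a pattern, such as
/// a name or `_`; a minimal sketch of the accepted form (added here for
/// illustration, not part of the original text):
///
/// ```rust
/// trait Tr {
///     fn f(_: i32);
///     fn g(value: i32);
/// }
/// ```
///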
/// [`for`]: keyword.for.html
/// [`impl`]: keyword.impl.html
/// [`unsafe`]: keyword.unsafe.html
/// [`Send`]: marker/trait.Send.html
/// [`Sync`]: marker/trait.Sync.html
/// [Ref-Traits]: ../reference/items/traits.html
/// [Ref-Trait-Objects]: ../reference/types/trait-object.html
mod trait_keyword {}
#[doc(keyword = "true")]

View file

@ -305,6 +305,7 @@
#![feature(ptr_internals)]
#![feature(raw)]
#![feature(raw_ref_macros)]
#![feature(ready_macro)]
#![feature(renamed_spin_loop)]
#![feature(rustc_attrs)]
#![feature(rustc_private)]

View file

@ -84,12 +84,12 @@ impl Command {
Ok(0) => return Ok((p, ours)),
Ok(8) => {
let (errno, footer) = bytes.split_at(4);
assert!(
combine(CLOEXEC_MSG_FOOTER) == combine(footer.try_into().unwrap()),
assert_eq!(
CLOEXEC_MSG_FOOTER, footer,
"Validation on the CLOEXEC pipe failed: {:?}",
bytes
);
let errno = combine(errno.try_into().unwrap());
let errno = i32::from_be_bytes(errno.try_into().unwrap());
assert!(p.wait().is_ok(), "wait() should either return Ok or panic");
return Err(Error::from_raw_os_error(errno));
}
@ -105,10 +105,6 @@ impl Command {
}
}
}
fn combine(arr: [u8; 4]) -> i32 {
i32::from_be_bytes(arr)
}
}
pub fn exec(&mut self, default: Stdio) -> io::Error {
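
The hunk above drops the local `combine` helper in favor of calling `i32::from_be_bytes` directly on the 4-byte errno payload; a minimal standalone sketch of that conversion (not taken from the diff):

```rust
fn main() {
    // Big-endian byte order: the most significant byte comes first.
    let errno = i32::from_be_bytes([0, 0, 0, 13]);
    assert_eq!(errno, 13);
}
```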

View file

@ -0,0 +1,115 @@
#include "rustllvm.h"
#include "llvm/ProfileData/Coverage/CoverageMapping.h"
#include "llvm/ProfileData/Coverage/CoverageMappingWriter.h"
#include "llvm/ProfileData/InstrProf.h"
#include "llvm/ADT/ArrayRef.h"
#include <iostream>
using namespace llvm;
extern "C" SmallVectorTemplateBase<coverage::CounterExpression>
*LLVMRustCoverageSmallVectorCounterExpressionCreate() {
return new SmallVector<coverage::CounterExpression, 32>();
}
extern "C" void LLVMRustCoverageSmallVectorCounterExpressionDispose(
SmallVectorTemplateBase<coverage::CounterExpression> *Vector) {
delete Vector;
}
extern "C" void LLVMRustCoverageSmallVectorCounterExpressionAdd(
SmallVectorTemplateBase<coverage::CounterExpression> *Expressions,
coverage::CounterExpression::ExprKind Kind,
unsigned LeftIndex,
unsigned RightIndex) {
auto LHS = coverage::Counter::getCounter(LeftIndex);
auto RHS = coverage::Counter::getCounter(RightIndex);
Expressions->push_back(coverage::CounterExpression { Kind, LHS, RHS });
}
extern "C" SmallVectorTemplateBase<coverage::CounterMappingRegion>
*LLVMRustCoverageSmallVectorCounterMappingRegionCreate() {
return new SmallVector<coverage::CounterMappingRegion, 32>();
}
extern "C" void LLVMRustCoverageSmallVectorCounterMappingRegionDispose(
SmallVectorTemplateBase<coverage::CounterMappingRegion> *Vector) {
delete Vector;
}
extern "C" void LLVMRustCoverageSmallVectorCounterMappingRegionAdd(
SmallVectorTemplateBase<coverage::CounterMappingRegion> *MappingRegions,
unsigned Index,
unsigned FileID,
unsigned LineStart,
unsigned ColumnStart,
unsigned LineEnd,
unsigned ColumnEnd) {
auto Counter = coverage::Counter::getCounter(Index);
MappingRegions->push_back(coverage::CounterMappingRegion::makeRegion(
Counter, FileID, LineStart,
ColumnStart, LineEnd, ColumnEnd));
// FIXME(richkadel): As applicable, implement additional CounterMappingRegion types using the
// static method alternatives to `coverage::CounterMappingRegion::makeRegion`:
//
// makeExpansion(unsigned FileID, unsigned ExpandedFileID, unsigned LineStart,
// unsigned ColumnStart, unsigned LineEnd, unsigned ColumnEnd) {
// makeSkipped(unsigned FileID, unsigned LineStart, unsigned ColumnStart,
// unsigned LineEnd, unsigned ColumnEnd) {
// makeGapRegion(Counter Count, unsigned FileID, unsigned LineStart,
// unsigned ColumnStart, unsigned LineEnd, unsigned ColumnEnd) {
}
extern "C" void LLVMRustCoverageWriteFilenamesSectionToBuffer(
const char* const Filenames[],
size_t FilenamesLen,
RustStringRef BufferOut) {
SmallVector<StringRef,32> FilenameRefs;
for (size_t i = 0; i < FilenamesLen; i++) {
FilenameRefs.push_back(StringRef(Filenames[i]));
}
auto FilenamesWriter = coverage::CoverageFilenamesSectionWriter(
makeArrayRef(FilenameRefs));
RawRustStringOstream OS(BufferOut);
FilenamesWriter.write(OS);
}
extern "C" void LLVMRustCoverageWriteMappingToBuffer(
const unsigned *VirtualFileMappingIDs,
unsigned NumVirtualFileMappingIDs,
const SmallVectorImpl<coverage::CounterExpression> *Expressions,
SmallVectorImpl<coverage::CounterMappingRegion> *MappingRegions,
RustStringRef BufferOut) {
auto CoverageMappingWriter = coverage::CoverageMappingWriter(
makeArrayRef(VirtualFileMappingIDs, NumVirtualFileMappingIDs),
makeArrayRef(*Expressions),
MutableArrayRef<coverage::CounterMappingRegion> { *MappingRegions });
RawRustStringOstream OS(BufferOut);
CoverageMappingWriter.write(OS);
}
extern "C" uint64_t LLVMRustCoverageComputeHash(const char *Name) {
StringRef NameRef(Name);
return IndexedInstrProf::ComputeHash(NameRef);
}
extern "C" void LLVMRustCoverageWriteSectionNameToString(LLVMModuleRef M,
RustStringRef Str) {
Triple TargetTriple(unwrap(M)->getTargetTriple());
auto name = getInstrProfSectionName(IPSK_covmap,
TargetTriple.getObjectFormat());
RawRustStringOstream OS(Str);
OS << name;
}
extern "C" void LLVMRustCoverageWriteMappingVarNameToString(RustStringRef Str) {
auto name = getCoverageMappingVarName();
RawRustStringOstream OS(Str);
OS << name;
}
extern "C" uint32_t LLVMRustCoverageMappingVersion() {
return coverage::CovMapVersion::CurrentVersion;
}
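
For orientation, the Rust side of two of the simpler helpers above could be declared roughly as follows. This is only a sketch: the real bindings live in the compiler's LLVM wrapper crate, and their module layout may differ.

```rust
#![allow(dead_code)]

use std::os::raw::c_char;

extern "C" {
    // Mirrors `uint64_t LLVMRustCoverageComputeHash(const char *Name)`.
    fn LLVMRustCoverageComputeHash(name: *const c_char) -> u64;
    // Mirrors `uint32_t LLVMRustCoverageMappingVersion()`.
    fn LLVMRustCoverageMappingVersion() -> u32;
}

fn main() {
    // Declarations only: actually calling these requires linking against the rustllvm wrapper.
    println!("coverage FFI declarations compile");
}
```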

View file

@ -1395,7 +1395,7 @@ extern "C" LLVMValueRef LLVMRustBuildCall(LLVMBuilderRef B, LLVMValueRef Fn,
FTy, Callee, makeArrayRef(unwrap(Args), NumArgs), Bundles));
}
extern "C" LLVMValueRef LLVMRustGetInstrprofIncrementIntrinsic(LLVMModuleRef M) {
extern "C" LLVMValueRef LLVMRustGetInstrProfIncrementIntrinsic(LLVMModuleRef M) {
return wrap(llvm::Intrinsic::getDeclaration(unwrap(M),
(llvm::Intrinsic::ID)llvm::Intrinsic::instrprof_increment));
}

View file

@ -3,6 +3,7 @@
#include "llvm-c/Object.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Triple.h"
#include "llvm/Analysis/Lint.h"
#include "llvm/Analysis/Passes.h"

View file

@ -1,4 +1,4 @@
// compile-flags: --extern std=
// error-pattern: can't find crate for `std`
// error-pattern: extern location for std does not exist
fn main() {}

View file

@ -3,34 +3,40 @@
fn bar() -> bool {
let mut _0: bool; // return place in scope 0 at $DIR/instrument_coverage.rs:18:13: 18:17
+ let mut _1: (); // in scope 0 at $DIR/instrument_coverage.rs:18:1: 20:2
+ let mut _1: (); // in scope 0 at $DIR/instrument_coverage.rs:18:18: 18:18
bb0: {
+ StorageLive(_1); // scope 0 at $DIR/instrument_coverage.rs:18:1: 20:2
+ _1 = const std::intrinsics::count_code_region(const 0_u32, const 484_u32, const 513_u32) -> bb2; // scope 0 at $DIR/instrument_coverage.rs:18:1: 20:2
+ StorageLive(_1); // scope 0 at $DIR/instrument_coverage.rs:18:18: 18:18
+ _1 = const std::intrinsics::count_code_region(const 10208505205182607101_u64, const 0_u32, const 501_u32, const 513_u32) -> bb2; // scope 0 at $DIR/instrument_coverage.rs:18:18: 18:18
+ // ty::Const
+ // + ty: unsafe extern "rust-intrinsic" fn(u32, u32, u32) {std::intrinsics::count_code_region}
+ // + ty: unsafe extern "rust-intrinsic" fn(u64, u32, u32, u32) {std::intrinsics::count_code_region}
+ // + val: Value(Scalar(<ZST>))
+ // mir::Constant
+ // + span: $DIR/instrument_coverage.rs:18:1: 18:1
+ // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u32, u32, u32) {std::intrinsics::count_code_region}, val: Value(Scalar(<ZST>)) }
+ // + span: $DIR/instrument_coverage.rs:18:18: 18:18
+ // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u64, u32, u32, u32) {std::intrinsics::count_code_region}, val: Value(Scalar(<ZST>)) }
+ // ty::Const
+ // + ty: u64
+ // + val: Value(Scalar(0x8dabe565aaa2aefd))
+ // mir::Constant
+ // + span: $DIR/instrument_coverage.rs:18:18: 18:18
+ // + literal: Const { ty: u64, val: Value(Scalar(0x8dabe565aaa2aefd)) }
+ // ty::Const
+ // + ty: u32
+ // + val: Value(Scalar(0x00000000))
+ // mir::Constant
+ // + span: $DIR/instrument_coverage.rs:18:1: 18:1
+ // + span: $DIR/instrument_coverage.rs:18:18: 18:18
+ // + literal: Const { ty: u32, val: Value(Scalar(0x00000000)) }
+ // ty::Const
+ // + ty: u32
+ // + val: Value(Scalar(0x000001e4))
+ // + val: Value(Scalar(0x000001f5))
+ // mir::Constant
+ // + span: $DIR/instrument_coverage.rs:18:1: 18:1
+ // + literal: Const { ty: u32, val: Value(Scalar(0x000001e4)) }
+ // + span: $DIR/instrument_coverage.rs:18:18: 18:18
+ // + literal: Const { ty: u32, val: Value(Scalar(0x000001f5)) }
+ // ty::Const
+ // + ty: u32
+ // + val: Value(Scalar(0x00000201))
+ // mir::Constant
+ // + span: $DIR/instrument_coverage.rs:18:1: 18:1
+ // + span: $DIR/instrument_coverage.rs:18:18: 18:18
+ // + literal: Const { ty: u32, val: Value(Scalar(0x00000201)) }
+ }
+

View file

@ -6,35 +6,41 @@
let mut _1: (); // in scope 0 at $DIR/instrument_coverage.rs:9:1: 15:2
let mut _2: bool; // in scope 0 at $DIR/instrument_coverage.rs:11:12: 11:17
let mut _3: !; // in scope 0 at $DIR/instrument_coverage.rs:11:18: 13:10
+ let mut _4: (); // in scope 0 at $DIR/instrument_coverage.rs:9:1: 15:2
+ let mut _4: (); // in scope 0 at $DIR/instrument_coverage.rs:9:11: 9:11
bb0: {
- falseUnwind -> [real: bb1, cleanup: bb2]; // scope 0 at $DIR/instrument_coverage.rs:10:5: 14:6
+ StorageLive(_4); // scope 0 at $DIR/instrument_coverage.rs:9:1: 15:2
+ _4 = const std::intrinsics::count_code_region(const 0_u32, const 387_u32, const 465_u32) -> bb7; // scope 0 at $DIR/instrument_coverage.rs:9:1: 15:2
+ StorageLive(_4); // scope 0 at $DIR/instrument_coverage.rs:9:11: 9:11
+ _4 = const std::intrinsics::count_code_region(const 16004455475339839479_u64, const 0_u32, const 397_u32, const 465_u32) -> bb7; // scope 0 at $DIR/instrument_coverage.rs:9:11: 9:11
+ // ty::Const
+ // + ty: unsafe extern "rust-intrinsic" fn(u32, u32, u32) {std::intrinsics::count_code_region}
+ // + ty: unsafe extern "rust-intrinsic" fn(u64, u32, u32, u32) {std::intrinsics::count_code_region}
+ // + val: Value(Scalar(<ZST>))
+ // mir::Constant
+ // + span: $DIR/instrument_coverage.rs:9:1: 9:1
+ // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u32, u32, u32) {std::intrinsics::count_code_region}, val: Value(Scalar(<ZST>)) }
+ // + span: $DIR/instrument_coverage.rs:9:11: 9:11
+ // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u64, u32, u32, u32) {std::intrinsics::count_code_region}, val: Value(Scalar(<ZST>)) }
+ // ty::Const
+ // + ty: u64
+ // + val: Value(Scalar(0xde1b3f75a72fc7f7))
+ // mir::Constant
+ // + span: $DIR/instrument_coverage.rs:9:11: 9:11
+ // + literal: Const { ty: u64, val: Value(Scalar(0xde1b3f75a72fc7f7)) }
+ // ty::Const
+ // + ty: u32
+ // + val: Value(Scalar(0x00000000))
+ // mir::Constant
+ // + span: $DIR/instrument_coverage.rs:9:1: 9:1
+ // + span: $DIR/instrument_coverage.rs:9:11: 9:11
+ // + literal: Const { ty: u32, val: Value(Scalar(0x00000000)) }
+ // ty::Const
+ // + ty: u32
+ // + val: Value(Scalar(0x00000183))
+ // + val: Value(Scalar(0x0000018d))
+ // mir::Constant
+ // + span: $DIR/instrument_coverage.rs:9:1: 9:1
+ // + literal: Const { ty: u32, val: Value(Scalar(0x00000183)) }
+ // + span: $DIR/instrument_coverage.rs:9:11: 9:11
+ // + literal: Const { ty: u32, val: Value(Scalar(0x0000018d)) }
+ // ty::Const
+ // + ty: u32
+ // + val: Value(Scalar(0x000001d1))
+ // mir::Constant
+ // + span: $DIR/instrument_coverage.rs:9:1: 9:1
+ // + span: $DIR/instrument_coverage.rs:9:11: 9:11
+ // + literal: Const { ty: u32, val: Value(Scalar(0x000001d1)) }
}

View file

@ -0,0 +1,57 @@
# needs-profiler-support
# ignore-msvc
# FIXME(richkadel): Debug the following problem, and reenable on Windows (by
# removing the `# ignore-msvc` directive above). The current implementation
# generates a segfault when running the instrumented `main` executable,
# after the `main` program code executes, but before the process terminates.
# This most likely points to a problem generating the LLVM "main.profraw"
# file.
-include ../tools.mk
# This test makes sure that LLVM coverage maps are generated in LLVM IR.
COMMON_FLAGS=-Zinstrument-coverage
all:
# Compile the test program with instrumentation, and also generate LLVM IR
$(RUSTC) $(COMMON_FLAGS) main.rs
# Run it in order to generate some profiling data,
# with `LLVM_PROFILE_FILE=<profdata_file>` environment variable set to
# output the coverage stats for this run.
LLVM_PROFILE_FILE="$(TMPDIR)"/main.profraw \
$(call RUN,main)
# Postprocess the profiling data so it can be used by the llvm-cov tool
"$(LLVM_BIN_DIR)"/llvm-profdata merge --sparse \
"$(TMPDIR)"/main.profraw \
-o "$(TMPDIR)"/main.profdata
# Generate a coverage report using `llvm-cov show`. The output ordering
# can be non-deterministic, so ignore the return status. If the test fails
# when comparing the JSON `export`, the `show` output may be useful when
# debugging.
"$(LLVM_BIN_DIR)"/llvm-cov show \
--Xdemangler="$(RUST_DEMANGLER)" \
--show-line-counts-or-regions \
--instr-profile="$(TMPDIR)"/main.profdata \
$(call BIN,"$(TMPDIR)"/main) \
> "$(TMPDIR)"/actual_show_coverage.txt
# Compare the show coverage output
$(DIFF) typical_show_coverage.txt "$(TMPDIR)"/actual_show_coverage.txt || \
>&2 echo 'diff failed for `llvm-cov show` (might not be an error)'
# Generate a coverage report in JSON, using `llvm-cov export`, and fail if
# there are differences from the expected output.
"$(LLVM_BIN_DIR)"/llvm-cov export \
--summary-only \
--instr-profile="$(TMPDIR)"/main.profdata \
$(call BIN,"$(TMPDIR)"/main) \
| "$(PYTHON)" prettify_json.py \
> "$(TMPDIR)"/actual_export_coverage.json
# Check that the exported JSON coverage data matches what we expect
$(DIFF) expected_export_coverage.json "$(TMPDIR)"/actual_export_coverage.json

View file

@ -0,0 +1,59 @@
{
"data": [
{
"files": [
{
"filename": "main.rs",
"summary": {
"functions": {
"count": 7,
"covered": 5,
"percent": 71.42857142857143
},
"instantiations": {
"count": 8,
"covered": 6,
"percent": 75
},
"lines": {
"count": 30,
"covered": 25,
"percent": 83.33333333333334
},
"regions": {
"count": 7,
"covered": 5,
"notcovered": 2,
"percent": 71.42857142857143
}
}
}
],
"totals": {
"functions": {
"count": 7,
"covered": 5,
"percent": 71.42857142857143
},
"instantiations": {
"count": 8,
"covered": 6,
"percent": 75
},
"lines": {
"count": 30,
"covered": 25,
"percent": 83.33333333333334
},
"regions": {
"count": 7,
"covered": 5,
"notcovered": 2,
"percent": 71.42857142857143
}
}
}
],
"type": "llvm.coverage.json.export",
"version": "2.0.0"
}

View file

@ -0,0 +1,38 @@
pub fn will_be_called() -> &'static str {
let val = "called";
println!("{}", val);
val
}
pub fn will_not_be_called() -> bool {
println!("should not have been called");
false
}
pub fn print<T>(left: &str, value: T, right: &str)
where
T: std::fmt::Display,
{
println!("{}{}{}", left, value, right);
}
pub fn wrap_with<F, T>(inner: T, should_wrap: bool, wrapper: F)
where
F: FnOnce(&T)
{
if should_wrap {
wrapper(&inner)
}
}
fn main() {
let less = 1;
let more = 100;
if less < more {
wrap_with(will_be_called(), less < more, |inner| print(" ***", inner, "*** "));
wrap_with(will_be_called(), more < less, |inner| print(" ***", inner, "*** "));
} else {
wrap_with(will_not_be_called(), true, |inner| print("wrapped result is: ", inner, ""));
}
}

View file

@ -0,0 +1,9 @@
#!/usr/bin/env python
import sys
import json
# Try to decode each line in order to ensure it is a valid JSON document
for line in sys.stdin:
parsed = json.loads(line)
print (json.dumps(parsed, indent=2, separators=(',', ': '), sort_keys=True))

View file

@ -0,0 +1,55 @@
1| 2|pub fn will_be_called() -> &'static str {
2| 2| let val = "called";
3| 2| println!("{}", val);
4| 2| val
5| 2|}
6| |
7| 0|pub fn will_not_be_called() -> bool {
8| 0| println!("should not have been called");
9| 0| false
10| 0|}
11| |
12| |pub fn print<T>(left: &str, value: T, right: &str)
13| |where
14| | T: std::fmt::Display,
15| 1|{
16| 1| println!("{}{}{}", left, value, right);
17| 1|}
18| |
19| |pub fn wrap_with<F, T>(inner: T, should_wrap: bool, wrapper: F)
20| |where
21| | F: FnOnce(&T)
22| 2|{
23| 2| if should_wrap {
24| 2| wrapper(&inner)
25| 2| }
26| 2|}
------------------
| main[317d481089b8c8fe]::wrap_with::<main[317d481089b8c8fe]::main::{closure#0}, &str>:
| 22| 1|{
| 23| 1| if should_wrap {
| 24| 1| wrapper(&inner)
| 25| 1| }
| 26| 1|}
------------------
| main[317d481089b8c8fe]::wrap_with::<main[317d481089b8c8fe]::main::{closure#1}, &str>:
| 22| 1|{
| 23| 1| if should_wrap {
| 24| 1| wrapper(&inner)
| 25| 1| }
| 26| 1|}
------------------
27| |
28| 1|fn main() {
29| 1| let less = 1;
30| 1| let more = 100;
31| 1|
32| 1| if less < more {
33| 1| wrap_with(will_be_called(), less < more, |inner| print(" ***", inner, "*** "));
34| 1| wrap_with(will_be_called(), more < less, |inner| print(" ***", inner, "*** "));
^0
35| 1| } else {
36| 1| wrap_with(will_not_be_called(), true, |inner| print("wrapped result is: ", inner, ""));
37| 1| }
38| 1|}

View file

@ -18,6 +18,9 @@ endif
HTMLDOCCK := '$(PYTHON)' '$(S)/src/etc/htmldocck.py'
CGREP := "$(S)/src/etc/cat-and-grep.sh"
# diff with common flags for multi-platform diffs against text output
DIFF := diff -u --strip-trailing-cr
# This is the name of the binary we will generate and run; use this
# e.g. for `$(CC) -o $(RUN_BINFILE)`.
RUN_BINFILE = $(TMPDIR)/$(1)

View file

@ -1,10 +1,8 @@
// aux-build:rlib-crate-test.rs
// ignore-tidy-linelength
// ignore-cross-compile gives a different error message
#![feature(plugin)]
#![plugin(rlib_crate_test)]
//~^ ERROR: plugin `rlib_crate_test` only found in rlib format, but must be available in dylib format
//~| WARN use of deprecated attribute `plugin`: compiler plugins are deprecated
//~^ ERROR: plugin `rlib_crate_test` only found in rlib format, but must be available in dylib
fn main() {}

View file

@ -1,16 +1,8 @@
error[E0457]: plugin `rlib_crate_test` only found in rlib format, but must be available in dylib format
--> $DIR/macro-crate-rlib.rs:6:11
--> $DIR/macro-crate-rlib.rs:5:11
|
LL | #![plugin(rlib_crate_test)]
| ^^^^^^^^^^^^^^^
warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
--> $DIR/macro-crate-rlib.rs:6:1
|
LL | #![plugin(rlib_crate_test)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
|
= note: `#[warn(deprecated)]` on by default
error: aborting due to previous error; 1 warning emitted
error: aborting due to previous error

View file

@ -0,0 +1,11 @@
#![feature(const_generics)] //~ WARN the feature `const_generics` is incomplete
fn foo<const N: usize, const A: [u8; N]>() {}
//~^ ERROR the type of const parameters must not
fn main() {
foo::<_, {[1]}>();
//~^ ERROR wrong number of const arguments
//~| ERROR wrong number of type arguments
//~| ERROR mismatched types
}

View file

@ -0,0 +1,37 @@
error[E0770]: the type of const parameters must not depend on other generic parameters
--> $DIR/issue-62878.rs:3:38
|
LL | fn foo<const N: usize, const A: [u8; N]>() {}
| ^ the type must not depend on the parameter `N`
warning: the feature `const_generics` is incomplete and may not be safe to use and/or cause compiler crashes
--> $DIR/issue-62878.rs:1:12
|
LL | #![feature(const_generics)]
| ^^^^^^^^^^^^^^
|
= note: `#[warn(incomplete_features)]` on by default
= note: see issue #44580 <https://github.com/rust-lang/rust/issues/44580> for more information
error[E0107]: wrong number of const arguments: expected 2, found 1
--> $DIR/issue-62878.rs:7:5
|
LL | foo::<_, {[1]}>();
| ^^^^^^^^^^^^^^^ expected 2 const arguments
error[E0107]: wrong number of type arguments: expected 0, found 1
--> $DIR/issue-62878.rs:7:11
|
LL | foo::<_, {[1]}>();
| ^ unexpected type argument
error[E0308]: mismatched types
--> $DIR/issue-62878.rs:7:15
|
LL | foo::<_, {[1]}>();
| ^^^ expected `usize`, found array `[{integer}; 1]`
error: aborting due to 4 previous errors; 1 warning emitted
Some errors have detailed explanations: E0107, E0308, E0770.
For more information about an error, try `rustc --explain E0107`.

View file

@ -0,0 +1,17 @@
// run-pass
#![feature(const_fn)]
#![feature(const_unreachable_unchecked)]
const unsafe fn foo(x: bool) -> bool {
match x {
true => true,
false => std::hint::unreachable_unchecked(),
}
}
const BAR: bool = unsafe { foo(true) };
fn main() {
assert_eq!(BAR, true);
}

View file

@ -0,0 +1,20 @@
// build-fail
#![feature(const_fn)]
#![feature(const_unreachable_unchecked)]
const unsafe fn foo(x: bool) -> bool {
match x {
true => true,
false => std::hint::unreachable_unchecked(),
}
}
#[warn(const_err)]
const BAR: bool = unsafe { foo(false) };
fn main() {
assert_eq!(BAR, true);
//~^ ERROR E0080
//~| ERROR erroneous constant
}

View file

@ -0,0 +1,44 @@
warning: any use of this value will cause an error
--> $SRC_DIR/libcore/hint.rs:LL:COL
|
LL | unsafe { intrinsics::unreachable() }
| ^^^^^^^^^^^^^^^^^^^^^^^^^
| |
| entering unreachable code
| inside `std::hint::unreachable_unchecked` at $SRC_DIR/libcore/hint.rs:LL:COL
| inside `foo` at $DIR/const_unsafe_unreachable_ub.rs:9:18
| inside `BAR` at $DIR/const_unsafe_unreachable_ub.rs:14:28
|
::: $DIR/const_unsafe_unreachable_ub.rs:14:1
|
LL | const BAR: bool = unsafe { foo(false) };
| ----------------------------------------
|
note: the lint level is defined here
--> $DIR/const_unsafe_unreachable_ub.rs:13:8
|
LL | #[warn(const_err)]
| ^^^^^^^^^
error[E0080]: evaluation of constant expression failed
--> $DIR/const_unsafe_unreachable_ub.rs:17:3
|
LL | assert_eq!(BAR, true);
| ^^^^^^^^^^^---^^^^^^^^
| |
| referenced constant has errors
|
= note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)
error: erroneous constant used
--> $DIR/const_unsafe_unreachable_ub.rs:17:3
|
LL | assert_eq!(BAR, true);
| ^^^^^^^^^^^^^^^^^^^^^^ referenced constant has errors
|
= note: `#[deny(const_err)]` on by default
= note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)
error: aborting due to 2 previous errors; 1 warning emitted
For more information about this error, try `rustc --explain E0080`.

View file

@ -1,6 +1,6 @@
// run-pass
#![allow(unused_variables)]
// compile-flags: --extern LooksLikeExternCrate
// compile-flags: --extern LooksLikeExternCrate=/path/to/nowhere
mod m {
pub struct LooksLikeExternCrate;

View file

@ -1,6 +1,5 @@
#![feature(non_ascii_idents)]
extern crate ьаг; //~ ERROR cannot load a crate with a non-ascii name `ьаг`
//~| ERROR can't find crate for `ьаг`
fn main() {}

View file

@ -4,12 +4,5 @@ error: cannot load a crate with a non-ascii name `ьаг`
LL | extern crate ьаг;
| ^^^^^^^^^^^^^^^^^
error[E0463]: can't find crate for `ьаг`
--> $DIR/crate_name_nonascii_forbidden-1.rs:3:1
|
LL | extern crate ьаг;
| ^^^^^^^^^^^^^^^^^ can't find crate
error: aborting due to previous error
error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0463`.

View file

@ -3,7 +3,5 @@
#![feature(non_ascii_idents)]
use му_сгате::baz; //~ ERROR cannot load a crate with a non-ascii name `му_сгате`
//~| can't find crate for `му_сгате`
fn main() {}

View file

@ -4,12 +4,5 @@ error: cannot load a crate with a non-ascii name `му_сгате`
LL | use му_сгате::baz;
| ^^^^^^^^
error[E0463]: can't find crate for `му_сгате`
--> $DIR/crate_name_nonascii_forbidden-2.rs:5:5
|
LL | use му_сгате::baz;
| ^^^^^^^^ can't find crate
error: aborting due to previous error
error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0463`.

View file

@ -186,6 +186,9 @@ pub struct Config {
/// The rustdoc executable.
pub rustdoc_path: Option<PathBuf>,
/// The rust-demangler executable.
pub rust_demangler_path: Option<PathBuf>,
/// The Python executable to use for LLDB.
pub lldb_python: String,

View file

@ -53,6 +53,7 @@ pub fn parse_config(args: Vec<String>) -> Config {
.reqopt("", "run-lib-path", "path to target shared libraries", "PATH")
.reqopt("", "rustc-path", "path to rustc to use for compiling", "PATH")
.optopt("", "rustdoc-path", "path to rustdoc to use for compiling", "PATH")
.optopt("", "rust-demangler-path", "path to rust-demangler to use in tests", "PATH")
.reqopt("", "lldb-python", "path to python to use for doc tests", "PATH")
.reqopt("", "docck-python", "path to python to use for doc tests", "PATH")
.optopt("", "valgrind-path", "path to Valgrind executable for Valgrind tests", "PROGRAM")
@ -182,6 +183,7 @@ pub fn parse_config(args: Vec<String>) -> Config {
run_lib_path: make_absolute(opt_path(matches, "run-lib-path")),
rustc_path: opt_path(matches, "rustc-path"),
rustdoc_path: matches.opt_str("rustdoc-path").map(PathBuf::from),
rust_demangler_path: matches.opt_str("rust-demangler-path").map(PathBuf::from),
lldb_python: matches.opt_str("lldb-python").unwrap(),
docck_python: matches.opt_str("docck-python").unwrap(),
valgrind_path: matches.opt_str("valgrind-path"),
@ -246,6 +248,7 @@ pub fn log_config(config: &Config) {
logv(c, format!("run_lib_path: {:?}", config.run_lib_path));
logv(c, format!("rustc_path: {:?}", config.rustc_path.display()));
logv(c, format!("rustdoc_path: {:?}", config.rustdoc_path));
logv(c, format!("rust_demangler_path: {:?}", config.rust_demangler_path));
logv(c, format!("src_base: {:?}", config.src_base.display()));
logv(c, format!("build_base: {:?}", config.build_base.display()));
logv(c, format!("stage_id: {}", config.stage_id));
@ -479,6 +482,8 @@ fn common_inputs_stamp(config: &Config) -> Stamp {
stamp.add_path(&rustdoc_path);
stamp.add_path(&rust_src_dir.join("src/etc/htmldocck.py"));
}
// FIXME(richkadel): Do I need to add an `if let Some(rust_demangler_path)` contribution to the
// stamp here as well?
// Compiletest itself.
stamp.add_dir(&rust_src_dir.join("src/tools/compiletest/"));

View file

@ -2739,6 +2739,10 @@ impl<'test> TestCx<'test> {
cmd.env("RUSTDOC", cwd.join(rustdoc));
}
if let Some(ref rust_demangler) = self.config.rust_demangler_path {
cmd.env("RUST_DEMANGLER", cwd.join(rust_demangler));
}
if let Some(ref node) = self.config.nodejs {
cmd.env("NODE", node);
}

View file

@ -23,6 +23,32 @@ use std::rc::Rc;
use crate::Redirect::*;
// Add linkcheck exceptions here
// If at all possible you should use intra-doc links to avoid linkcheck issues. These
// are cases where that does not work
// [(generated_documentation_page, &[broken_links])]
const LINKCHECK_EXCEPTIONS: &[(&str, &[&str])] = &[
// These are methods on slice, and `Self` does not work on primitive impls
// in intra-doc links (primitive impls are weird)
// https://github.com/rust-lang/rust/issues/62834 is necessary to be
// able to link to slices
(
"std/io/struct.IoSlice.html",
&[
"#method.as_mut_ptr",
"#method.sort_by_key",
"#method.make_ascii_uppercase",
"#method.make_ascii_lowercase",
],
),
// These try to link to std::collections, but are defined in alloc
// https://github.com/rust-lang/rust/issues/74481
("std/collections/btree_map/struct.BTreeMap.html", &["#insert-and-complex-keys"]),
("std/collections/btree_set/struct.BTreeSet.html", &["#insert-and-complex-keys"]),
("alloc/collections/btree_map/struct.BTreeMap.html", &["#insert-and-complex-keys"]),
("alloc/collections/btree_set/struct.BTreeSet.html", &["#insert-and-complex-keys"]),
];
macro_rules! t {
($e:expr) => {
match $e {
@ -111,35 +137,20 @@ fn walk(cache: &mut Cache, root: &Path, dir: &Path, errors: &mut bool) {
}
}
fn is_exception(file: &Path, link: &str) -> bool {
if let Some(entry) = LINKCHECK_EXCEPTIONS.iter().find(|&(f, _)| file.ends_with(f)) {
entry.1.contains(&link)
} else {
false
}
}
fn check(cache: &mut Cache, root: &Path, file: &Path, errors: &mut bool) -> Option<PathBuf> {
// Ignore non-HTML files.
if file.extension().and_then(|s| s.to_str()) != Some("html") {
return None;
}
// Unfortunately we're not 100% free of broken links today, so we need a few
// exceptions to get this past `make check`.
// FIXME(#32129)
if file.ends_with("std/io/struct.IoSlice.html")
|| file.ends_with("std/string/struct.String.html")
{
return None;
}
// FIXME(#32553)
if file.ends_with("alloc/string/struct.String.html") {
return None;
}
// FIXME(#32130)
if file.ends_with("alloc/collections/btree_map/struct.BTreeMap.html")
|| file.ends_with("alloc/collections/btree_set/struct.BTreeSet.html")
|| file.ends_with("std/collections/btree_map/struct.BTreeMap.html")
|| file.ends_with("std/collections/btree_set/struct.BTreeSet.html")
|| file.ends_with("std/collections/hash_map/struct.HashMap.html")
|| file.ends_with("std/collections/hash_set/struct.HashSet.html")
{
return None;
}
let res = load_file(cache, root, file, SkipRedirect);
let (pretty_file, contents) = match res {
Ok(res) => res,
@ -254,17 +265,20 @@ fn check(cache: &mut Cache, root: &Path, file: &Path, errors: &mut bool) -> Opti
let entry = &mut cache.get_mut(&pretty_path).unwrap();
entry.parse_ids(&pretty_path, &contents, errors);
if !entry.ids.contains(*fragment) {
if !entry.ids.contains(*fragment) && !is_exception(file, &format!("#{}", fragment))
{
*errors = true;
print!("{}:{}: broken link fragment ", pretty_file.display(), i + 1);
println!("`#{}` pointing to `{}`", fragment, pretty_path.display());
};
}
} else {
*errors = true;
print!("{}:{}: broken link - ", pretty_file.display(), i + 1);
let pretty_path = path.strip_prefix(root).unwrap_or(&path);
println!("{}", pretty_path.display());
if !is_exception(file, pretty_path.to_str().unwrap()) {
*errors = true;
print!("{}:{}: broken link - ", pretty_file.display(), i + 1);
println!("{}", pretty_path.display());
}
}
});
Some(pretty_file)

View file

@ -0,0 +1,12 @@
[package]
authors = ["The Rust Project Developers"]
name = "rust-demangler"
version = "0.0.0"
edition = "2018"
[dependencies]
rustc-demangle = "0.1"
[[bin]]
name = "rust-demangler"
path = "main.rs"
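
The binary's entry point is not shown in this diff. As a rough idea only, a demangling filter built on the `rustc-demangle` dependency declared above could look like the following sketch (the real `main.rs` may differ):

```rust
use std::io::{self, BufRead, Write};

fn main() -> io::Result<()> {
    let stdin = io::stdin();
    let stdout = io::stdout();
    let mut out = stdout.lock();
    for line in stdin.lock().lines() {
        let line = line?;
        // `rustc_demangle::demangle` echoes the input unchanged when it is not a Rust symbol.
        writeln!(out, "{}", rustc_demangle::demangle(&line))?;
    }
    Ok(())
}
```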

Some files were not shown because too many files have changed in this diff