Merge commit '98ed962c7d
' into master
This commit is contained in:
commit
7cbe50e209
68 changed files with 2682 additions and 1135 deletions
|
@ -49,11 +49,11 @@ jobs:
|
||||||
# `llvm-14-tools` is needed to install the `FileCheck` binary which is used for asm tests.
|
# `llvm-14-tools` is needed to install the `FileCheck` binary which is used for asm tests.
|
||||||
run: sudo apt-get install ninja-build ripgrep llvm-14-tools
|
run: sudo apt-get install ninja-build ripgrep llvm-14-tools
|
||||||
|
|
||||||
- name: Install rustfmt
|
- name: Install rustfmt & clippy
|
||||||
run: rustup component add rustfmt
|
run: rustup component add rustfmt clippy
|
||||||
|
|
||||||
- name: Download artifact
|
- name: Download artifact
|
||||||
run: curl -LO https://github.com/antoyo/gcc/releases/latest/download/${{ matrix.libgccjit_version.gcc }}
|
run: curl -LO https://github.com/rust-lang/gcc/releases/latest/download/${{ matrix.libgccjit_version.gcc }}
|
||||||
|
|
||||||
- name: Setup path to libgccjit
|
- name: Setup path to libgccjit
|
||||||
run: |
|
run: |
|
||||||
|
@ -78,8 +78,15 @@ jobs:
|
||||||
- name: Build
|
- name: Build
|
||||||
run: |
|
run: |
|
||||||
./y.sh prepare --only-libcore
|
./y.sh prepare --only-libcore
|
||||||
./y.sh build
|
./y.sh build --sysroot
|
||||||
cargo test
|
cargo test
|
||||||
|
|
||||||
|
- name: Run y.sh cargo build
|
||||||
|
run: |
|
||||||
|
./y.sh cargo build --manifest-path tests/hello-world/Cargo.toml
|
||||||
|
|
||||||
|
- name: Clean
|
||||||
|
run: |
|
||||||
./y.sh clean all
|
./y.sh clean all
|
||||||
|
|
||||||
- name: Prepare dependencies
|
- name: Prepare dependencies
|
||||||
|
@ -96,7 +103,12 @@ jobs:
|
||||||
./y.sh test --release --clean --build-sysroot ${{ matrix.commands }}
|
./y.sh test --release --clean --build-sysroot ${{ matrix.commands }}
|
||||||
|
|
||||||
- name: Check formatting
|
- name: Check formatting
|
||||||
run: cargo fmt -- --check
|
run: ./y.sh fmt --check
|
||||||
|
|
||||||
|
- name: clippy
|
||||||
|
run: |
|
||||||
|
cargo clippy --all-targets -- -D warnings
|
||||||
|
cargo clippy --all-targets --features master -- -D warnings
|
||||||
|
|
||||||
duplicates:
|
duplicates:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
|
@ -56,7 +56,7 @@ jobs:
|
||||||
|
|
||||||
- name: Download artifact
|
- name: Download artifact
|
||||||
if: matrix.libgccjit_version.gcc != 'libgccjit12.so'
|
if: matrix.libgccjit_version.gcc != 'libgccjit12.so'
|
||||||
run: curl -LO https://github.com/antoyo/gcc/releases/latest/download/gcc-13.deb
|
run: curl -LO https://github.com/rust-lang/gcc/releases/latest/download/gcc-13.deb
|
||||||
|
|
||||||
- name: Setup path to libgccjit
|
- name: Setup path to libgccjit
|
||||||
if: matrix.libgccjit_version.gcc != 'libgccjit12.so'
|
if: matrix.libgccjit_version.gcc != 'libgccjit12.so'
|
||||||
|
@ -94,7 +94,20 @@ jobs:
|
||||||
run: cat tests/failing-non-lto-tests.txt >> tests/failing-ui-tests.txt
|
run: cat tests/failing-non-lto-tests.txt >> tests/failing-ui-tests.txt
|
||||||
|
|
||||||
- name: Run tests
|
- name: Run tests
|
||||||
|
# TODO: re-enable those tests for libgccjit 12.
|
||||||
|
if: matrix.libgccjit_version.gcc != 'libgccjit12.so'
|
||||||
id: tests
|
id: tests
|
||||||
run: |
|
run: |
|
||||||
${{ matrix.libgccjit_version.env_extra }} ./y.sh test --release --clean --build-sysroot --test-failing-rustc ${{ matrix.libgccjit_version.extra }} | tee output_log
|
${{ matrix.libgccjit_version.env_extra }} ./y.sh test --release --clean --build-sysroot --test-failing-rustc ${{ matrix.libgccjit_version.extra }} | tee output_log
|
||||||
rg --text "test result" output_log >> $GITHUB_STEP_SUMMARY
|
rg --text "test result" output_log >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
|
- name: Run failing ui pattern tests for ICE
|
||||||
|
# TODO: re-enable those tests for libgccjit 12.
|
||||||
|
if: matrix.libgccjit_version.gcc != 'libgccjit12.so'
|
||||||
|
id: ui-tests
|
||||||
|
run: |
|
||||||
|
${{ matrix.libgccjit_version.env_extra }} ./y.sh test --release --test-failing-ui-pattern-tests ${{ matrix.libgccjit_version.extra }} | tee output_log_ui
|
||||||
|
if grep -q "the compiler unexpectedly panicked" output_log_ui; then
|
||||||
|
echo "Error: 'the compiler unexpectedly panicked' found in output logs. CI Error!!"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
|
@ -68,21 +68,23 @@ jobs:
|
||||||
run: |
|
run: |
|
||||||
./y.sh prepare --only-libcore --libgccjit12-patches
|
./y.sh prepare --only-libcore --libgccjit12-patches
|
||||||
./y.sh build --no-default-features --sysroot-panic-abort
|
./y.sh build --no-default-features --sysroot-panic-abort
|
||||||
cargo test --no-default-features
|
# Uncomment when we no longer need to remove global variables.
|
||||||
./y.sh clean all
|
#./y.sh build --sysroot --no-default-features --sysroot-panic-abort
|
||||||
|
#cargo test --no-default-features
|
||||||
|
#./y.sh clean all
|
||||||
|
|
||||||
- name: Prepare dependencies
|
#- name: Prepare dependencies
|
||||||
run: |
|
#run: |
|
||||||
git config --global user.email "user@example.com"
|
#git config --global user.email "user@example.com"
|
||||||
git config --global user.name "User"
|
#git config --global user.name "User"
|
||||||
./y.sh prepare --libgccjit12-patches
|
#./y.sh prepare --libgccjit12-patches
|
||||||
|
|
||||||
- name: Add more failing tests for GCC 12
|
#- name: Add more failing tests for GCC 12
|
||||||
run: cat tests/failing-ui-tests12.txt >> tests/failing-ui-tests.txt
|
#run: cat tests/failing-ui-tests12.txt >> tests/failing-ui-tests.txt
|
||||||
|
|
||||||
- name: Add more failing tests because the sysroot is not compiled with LTO
|
#- name: Add more failing tests because the sysroot is not compiled with LTO
|
||||||
run: cat tests/failing-non-lto-tests.txt >> tests/failing-ui-tests.txt
|
#run: cat tests/failing-non-lto-tests.txt >> tests/failing-ui-tests.txt
|
||||||
|
|
||||||
- name: Run tests
|
#- name: Run tests
|
||||||
run: |
|
#run: |
|
||||||
./y.sh test --release --clean --build-sysroot ${{ matrix.commands }} --no-default-features
|
#./y.sh test --release --clean --build-sysroot ${{ matrix.commands }} --no-default-features
|
||||||
|
|
|
@ -54,13 +54,7 @@ jobs:
|
||||||
run: curl -LO https://github.com/cross-cg-gcc-tools/cross-gcc/releases/latest/download/gcc-m68k-13.deb
|
run: curl -LO https://github.com/cross-cg-gcc-tools/cross-gcc/releases/latest/download/gcc-m68k-13.deb
|
||||||
|
|
||||||
- name: Download VM artifact
|
- name: Download VM artifact
|
||||||
uses: dawidd6/action-download-artifact@v2
|
run: curl -LO https://github.com/cross-cg-gcc-tools/vms/releases/latest/download/debian-m68k.img
|
||||||
with:
|
|
||||||
workflow: m68k.yml
|
|
||||||
name: debian-m68k
|
|
||||||
repo: cross-cg-gcc-tools/vms
|
|
||||||
branch: master
|
|
||||||
event: push
|
|
||||||
|
|
||||||
- name: Setup path to libgccjit
|
- name: Setup path to libgccjit
|
||||||
run: |
|
run: |
|
||||||
|
@ -88,10 +82,17 @@ jobs:
|
||||||
sudo mount debian-m68k.img vm
|
sudo mount debian-m68k.img vm
|
||||||
sudo cp $(which qemu-m68k-static) vm/usr/bin/
|
sudo cp $(which qemu-m68k-static) vm/usr/bin/
|
||||||
|
|
||||||
|
- name: Build sample project with target defined as JSON spec
|
||||||
|
run: |
|
||||||
|
./y.sh prepare --only-libcore --cross
|
||||||
|
./y.sh build --sysroot --target-triple m68k-unknown-linux-gnu --target ${{ github.workspace }}/target_specs/m68k-unknown-linux-gnu.json
|
||||||
|
./y.sh cargo build --manifest-path=./tests/hello-world/Cargo.toml --target ${{ github.workspace }}/target_specs/m68k-unknown-linux-gnu.json
|
||||||
|
./y.sh clean all
|
||||||
|
|
||||||
- name: Build
|
- name: Build
|
||||||
run: |
|
run: |
|
||||||
./y.sh prepare --only-libcore --cross
|
./y.sh prepare --only-libcore --cross
|
||||||
./y.sh build --target-triple m68k-unknown-linux-gnu
|
./y.sh build --sysroot --target-triple m68k-unknown-linux-gnu
|
||||||
CG_GCC_TEST_TARGET=m68k-unknown-linux-gnu cargo test
|
CG_GCC_TEST_TARGET=m68k-unknown-linux-gnu cargo test
|
||||||
./y.sh clean all
|
./y.sh clean all
|
||||||
|
|
||||||
|
|
|
@ -37,7 +37,7 @@ jobs:
|
||||||
run: sudo apt-get install ninja-build ripgrep
|
run: sudo apt-get install ninja-build ripgrep
|
||||||
|
|
||||||
- name: Download artifact
|
- name: Download artifact
|
||||||
run: curl -LO https://github.com/antoyo/gcc/releases/latest/download/gcc-13.deb
|
run: curl -LO https://github.com/rust-lang/gcc/releases/latest/download/gcc-13.deb
|
||||||
|
|
||||||
- name: Setup path to libgccjit
|
- name: Setup path to libgccjit
|
||||||
run: |
|
run: |
|
||||||
|
@ -53,7 +53,7 @@ jobs:
|
||||||
- name: Build
|
- name: Build
|
||||||
run: |
|
run: |
|
||||||
./y.sh prepare --only-libcore
|
./y.sh prepare --only-libcore
|
||||||
EMBED_LTO_BITCODE=1 ./y.sh build --release --release-sysroot
|
EMBED_LTO_BITCODE=1 ./y.sh build --sysroot --release --release-sysroot
|
||||||
cargo test
|
cargo test
|
||||||
./y.sh clean all
|
./y.sh clean all
|
||||||
|
|
||||||
|
@ -62,12 +62,12 @@ jobs:
|
||||||
git config --global user.email "user@example.com"
|
git config --global user.email "user@example.com"
|
||||||
git config --global user.name "User"
|
git config --global user.name "User"
|
||||||
./y.sh prepare
|
./y.sh prepare
|
||||||
# FIXME(antoyo): we cannot enable LTO for stdarch tests currently because of some failing LTO tests using proc-macros.
|
|
||||||
echo -n 'lto = "fat"' >> build_sysroot/Cargo.toml
|
|
||||||
|
|
||||||
- name: Add more failing tests because of undefined symbol errors (FIXME)
|
- name: Add more failing tests because of undefined symbol errors (FIXME)
|
||||||
run: cat tests/failing-lto-tests.txt >> tests/failing-ui-tests.txt
|
run: cat tests/failing-lto-tests.txt >> tests/failing-ui-tests.txt
|
||||||
|
|
||||||
- name: Run tests
|
- name: Run tests
|
||||||
run: |
|
run: |
|
||||||
|
# FIXME(antoyo): we cannot enable LTO for stdarch tests currently because of some failing LTO tests using proc-macros.
|
||||||
|
echo -n 'lto = "fat"' >> build_system/build_sysroot/Cargo.toml
|
||||||
EMBED_LTO_BITCODE=1 ./y.sh test --release --clean --release-sysroot --build-sysroot ${{ matrix.commands }}
|
EMBED_LTO_BITCODE=1 ./y.sh test --release --clean --release-sysroot --build-sysroot ${{ matrix.commands }}
|
||||||
|
|
|
@ -58,7 +58,7 @@ jobs:
|
||||||
- name: Build
|
- name: Build
|
||||||
run: |
|
run: |
|
||||||
./y.sh prepare --only-libcore
|
./y.sh prepare --only-libcore
|
||||||
./y.sh build --release --release-sysroot
|
./y.sh build --sysroot --release --release-sysroot
|
||||||
|
|
||||||
- name: Set env (part 2)
|
- name: Set env (part 2)
|
||||||
run: |
|
run: |
|
||||||
|
@ -89,12 +89,11 @@ jobs:
|
||||||
- name: Run stdarch tests
|
- name: Run stdarch tests
|
||||||
if: ${{ !matrix.cargo_runner }}
|
if: ${{ !matrix.cargo_runner }}
|
||||||
run: |
|
run: |
|
||||||
cd build_sysroot/sysroot_src/library/stdarch/
|
CHANNEL=release TARGET=x86_64-unknown-linux-gnu CG_RUSTFLAGS="-Ainternal_features" ./y.sh cargo test --manifest-path build/build_sysroot/sysroot_src/library/stdarch/Cargo.toml
|
||||||
CHANNEL=release TARGET=x86_64-unknown-linux-gnu CG_RUSTFLAGS="-Ainternal_features" ../../../../y.sh cargo test
|
|
||||||
|
|
||||||
- name: Run stdarch tests
|
- name: Run stdarch tests
|
||||||
if: ${{ matrix.cargo_runner }}
|
if: ${{ matrix.cargo_runner }}
|
||||||
run: |
|
run: |
|
||||||
cd build_sysroot/sysroot_src/library/stdarch/
|
|
||||||
# FIXME: these tests fail when the sysroot is compiled with LTO because of a missing symbol in proc-macro.
|
# FIXME: these tests fail when the sysroot is compiled with LTO because of a missing symbol in proc-macro.
|
||||||
STDARCH_TEST_EVERYTHING=1 CHANNEL=release CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUNNER="${{ matrix.cargo_runner }}" TARGET=x86_64-unknown-linux-gnu CG_RUSTFLAGS="-Ainternal_features" ../../../../y.sh cargo test -- --skip rtm --skip tbm --skip sse4a
|
# TODO: remove --skip test_mm512_stream_ps when stdarch is updated in rustc.
|
||||||
|
STDARCH_TEST_EVERYTHING=1 CHANNEL=release CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUNNER="${{ matrix.cargo_runner }}" TARGET=x86_64-unknown-linux-gnu CG_RUSTFLAGS="-Ainternal_features" ./y.sh cargo test --manifest-path build/build_sysroot/sysroot_src/library/stdarch/Cargo.toml -- --skip rtm --skip tbm --skip sse4a --skip test_mm512_stream_ps
|
||||||
|
|
4
compiler/rustc_codegen_gcc/.gitignore
vendored
4
compiler/rustc_codegen_gcc/.gitignore
vendored
|
@ -6,10 +6,6 @@ perf.data
|
||||||
perf.data.old
|
perf.data.old
|
||||||
*.events
|
*.events
|
||||||
*.string*
|
*.string*
|
||||||
/build_sysroot/sysroot
|
|
||||||
/build_sysroot/sysroot_src
|
|
||||||
/build_sysroot/Cargo.lock
|
|
||||||
/build_sysroot/test_target/Cargo.lock
|
|
||||||
gimple*
|
gimple*
|
||||||
*asm
|
*asm
|
||||||
res
|
res
|
||||||
|
|
|
@ -80,8 +80,7 @@ dependencies = [
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "gccjit"
|
name = "gccjit"
|
||||||
version = "2.0.0"
|
version = "2.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "git+https://github.com/rust-lang/gccjit.rs#328cb1b414f67dfa15162ba7a55ed01931f1b219"
|
||||||
checksum = "ecaa4c3da2d74c1a991b4faff75d49ab1d0522d9a99d8e2614b3b04d226417ce"
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"gccjit_sys",
|
"gccjit_sys",
|
||||||
]
|
]
|
||||||
|
@ -89,8 +88,7 @@ dependencies = [
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "gccjit_sys"
|
name = "gccjit_sys"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "git+https://github.com/rust-lang/gccjit.rs#328cb1b414f67dfa15162ba7a55ed01931f1b219"
|
||||||
checksum = "406a66fba005f1a02661f2f9443e5693dd3a667b7c58e70aa4ccc4c8b50b4758"
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"libc",
|
"libc",
|
||||||
]
|
]
|
||||||
|
|
|
@ -22,7 +22,7 @@ master = ["gccjit/master"]
|
||||||
default = ["master"]
|
default = ["master"]
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
gccjit = "2.0"
|
gccjit = "2.1"
|
||||||
|
|
||||||
# Local copy.
|
# Local copy.
|
||||||
#gccjit = { path = "../gccjit.rs" }
|
#gccjit = { path = "../gccjit.rs" }
|
||||||
|
|
|
@ -12,6 +12,14 @@ This is a GCC codegen for rustc, which means it can be loaded by the existing ru
|
||||||
The primary goal of this project is to be able to compile Rust code on platforms unsupported by LLVM.
|
The primary goal of this project is to be able to compile Rust code on platforms unsupported by LLVM.
|
||||||
A secondary goal is to check if using the gcc backend will provide any run-time speed improvement for the programs compiled using rustc.
|
A secondary goal is to check if using the gcc backend will provide any run-time speed improvement for the programs compiled using rustc.
|
||||||
|
|
||||||
|
### Dependencies
|
||||||
|
|
||||||
|
**rustup:** Follow the instructions on the official [website](https://www.rust-lang.org/tools/install)
|
||||||
|
|
||||||
|
**DejaGnu:** Consider to install DejaGnu which is necessary for running the libgccjit test suite. [website](https://www.gnu.org/software/dejagnu/#downloading)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Building
|
## Building
|
||||||
|
|
||||||
**This requires a patched libgccjit in order to work.
|
**This requires a patched libgccjit in order to work.
|
||||||
|
@ -80,7 +88,7 @@ Then you can run commands like this:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
$ ./y.sh prepare # download and patch sysroot src and install hyperfine for benchmarking
|
$ ./y.sh prepare # download and patch sysroot src and install hyperfine for benchmarking
|
||||||
$ ./y.sh build --release
|
$ ./y.sh build --sysroot --release
|
||||||
```
|
```
|
||||||
|
|
||||||
To run the tests:
|
To run the tests:
|
||||||
|
@ -91,10 +99,16 @@ $ ./y.sh test --release
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
`$CG_GCCJIT_DIR` is the directory you cloned this repo into in the following instructions:
|
You have to run these commands, in the corresponding order:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
export CG_GCCJIT_DIR=[the full path to rustc_codegen_gcc]
|
$ ./y.sh prepare
|
||||||
|
$ ./y.sh build --sysroot
|
||||||
|
```
|
||||||
|
To check if all is working correctly, run:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ ./y.sh cargo build --manifest-path tests/hello-world/Cargo.toml
|
||||||
```
|
```
|
||||||
|
|
||||||
### Cargo
|
### Cargo
|
||||||
|
@ -107,8 +121,7 @@ If you compiled cg_gccjit in debug mode (aka you didn't pass `--release` to `./y
|
||||||
|
|
||||||
### LTO
|
### LTO
|
||||||
|
|
||||||
To use LTO, you need to set the variable `FAT_LTO=1` and `EMBED_LTO_BITCODE=1` in addition to setting `lto = "fat"` in the `Cargo.toml`.
|
To use LTO, you need to set the variable `EMBED_LTO_BITCODE=1` in addition to setting `lto = "fat"` in the `Cargo.toml`.
|
||||||
Don't set `FAT_LTO` when compiling the sysroot, though: only set `EMBED_LTO_BITCODE=1`.
|
|
||||||
|
|
||||||
Failing to set `EMBED_LTO_BITCODE` will give you the following error:
|
Failing to set `EMBED_LTO_BITCODE` will give you the following error:
|
||||||
|
|
||||||
|
@ -118,7 +131,13 @@ error: failed to copy bitcode to object file: No such file or directory (os erro
|
||||||
|
|
||||||
### Rustc
|
### Rustc
|
||||||
|
|
||||||
> You should prefer using the Cargo method.
|
If you want to run `rustc` directly, you can do so with:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ ./y.sh rustc my_crate.rs
|
||||||
|
```
|
||||||
|
|
||||||
|
You can do the same manually (although we don't recommend it):
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
$ LIBRARY_PATH="[gcc-path value]" LD_LIBRARY_PATH="[gcc-path value]" rustc +$(cat $CG_GCCJIT_DIR/rust-toolchain | grep 'channel' | cut -d '=' -f 2 | sed 's/"//g' | sed 's/ //g') -Cpanic=abort -Zcodegen-backend=$CG_GCCJIT_DIR/target/release/librustc_codegen_gcc.so --sysroot $CG_GCCJIT_DIR/build_sysroot/sysroot my_crate.rs
|
$ LIBRARY_PATH="[gcc-path value]" LD_LIBRARY_PATH="[gcc-path value]" rustc +$(cat $CG_GCCJIT_DIR/rust-toolchain | grep 'channel' | cut -d '=' -f 2 | sed 's/"//g' | sed 's/ //g') -Cpanic=abort -Zcodegen-backend=$CG_GCCJIT_DIR/target/release/librustc_codegen_gcc.so --sysroot $CG_GCCJIT_DIR/build_sysroot/sysroot my_crate.rs
|
||||||
|
@ -126,18 +145,19 @@ $ LIBRARY_PATH="[gcc-path value]" LD_LIBRARY_PATH="[gcc-path value]" rustc +$(ca
|
||||||
|
|
||||||
## Env vars
|
## Env vars
|
||||||
|
|
||||||
<dl>
|
* _**CG_GCCJIT_DUMP_ALL_MODULES**_: Enables dumping of all compilation modules. When set to "1", a dump is created for each module during compilation and stored in `/tmp/reproducers/`.
|
||||||
<dt>CG_GCCJIT_INCR_CACHE_DISABLED</dt>
|
* _**CG_GCCJIT_DUMP_MODULE**_: Enables dumping of a specific module. When set with the module name, e.g., `CG_GCCJIT_DUMP_MODULE=module_name`, a dump of that specific module is created in `/tmp/reproducers/`.
|
||||||
<dd>Don't cache object files in the incremental cache. Useful during development of cg_gccjit
|
* _**CG_RUSTFLAGS**_: Send additional flags to rustc. Can be used to build the sysroot without unwinding by setting `CG_RUSTFLAGS=-Cpanic=abort`.
|
||||||
to make it possible to use incremental mode for all analyses performed by rustc without caching
|
* _**CG_GCCJIT_DUMP_TO_FILE**_: Dump a C-like representation to /tmp/gccjit_dumps and enable debug info in order to debug this C-like representation.
|
||||||
object files when their content should have been changed by a change to cg_gccjit.</dd>
|
* _**CG_GCCJIT_DUMP_RTL**_: Dumps RTL (Register Transfer Language) for virtual registers.
|
||||||
<dt>CG_GCCJIT_DISPLAY_CG_TIME</dt>
|
* _**CG_GCCJIT_DUMP_RTL_ALL**_: Dumps all RTL passes.
|
||||||
<dd>Display the time it took to perform codegen for a crate</dd>
|
* _**CG_GCCJIT_DUMP_TREE_ALL**_: Dumps all tree (GIMPLE) passes.
|
||||||
<dt>CG_RUSTFLAGS</dt>
|
* _**CG_GCCJIT_DUMP_IPA_ALL**_: Dumps all Interprocedural Analysis (IPA) passes.
|
||||||
<dd>Send additional flags to rustc. Can be used to build the sysroot without unwinding by setting `CG_RUSTFLAGS=-Cpanic=abort`.</dd>
|
* _**CG_GCCJIT_DUMP_CODE**_: Dumps the final generated code.
|
||||||
<dt>CG_GCCJIT_DUMP_TO_FILE</dt>
|
* _**CG_GCCJIT_DUMP_GIMPLE**_: Dumps the initial GIMPLE representation.
|
||||||
<dd>Dump a C-like representation to /tmp/gccjit_dumps and enable debug info in order to debug this C-like representation.</dd>
|
* _**CG_GCCJIT_DUMP_EVERYTHING**_: Enables dumping of all intermediate representations and passes.
|
||||||
</dl>
|
* _**CG_GCCJIT_KEEP_INTERMEDIATES**_: Keeps intermediate files generated during the compilation process.
|
||||||
|
* _**CG_GCCJIT_VERBOSE**_: Enables verbose output from the GCC driver.
|
||||||
|
|
||||||
## Extra documentation
|
## Extra documentation
|
||||||
|
|
||||||
|
|
|
@ -1,6 +0,0 @@
|
||||||
// TODO: remove this file and deps/libLLVM-18-rust-1.78.0-nightly.so when
|
|
||||||
// https://github.com/rust-lang/rust/pull/121967 is merged.
|
|
||||||
fn main() {
|
|
||||||
println!("cargo:rerun-if-changed=deps/libLLVM-18-rust-1.78.0-nightly.so");
|
|
||||||
println!("cargo:rustc-link-search=deps");
|
|
||||||
}
|
|
433
compiler/rustc_codegen_gcc/build_system/build_sysroot/Cargo.lock
Normal file
433
compiler/rustc_codegen_gcc/build_system/build_sysroot/Cargo.lock
Normal file
|
@ -0,0 +1,433 @@
|
||||||
|
# This file is automatically @generated by Cargo.
|
||||||
|
# It is not intended for manual editing.
|
||||||
|
version = 3
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "addr2line"
|
||||||
|
version = "0.21.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb"
|
||||||
|
dependencies = [
|
||||||
|
"compiler_builtins",
|
||||||
|
"gimli",
|
||||||
|
"rustc-std-workspace-alloc",
|
||||||
|
"rustc-std-workspace-core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "adler"
|
||||||
|
version = "1.0.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
|
||||||
|
dependencies = [
|
||||||
|
"compiler_builtins",
|
||||||
|
"rustc-std-workspace-core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "alloc"
|
||||||
|
version = "0.0.0"
|
||||||
|
dependencies = [
|
||||||
|
"compiler_builtins",
|
||||||
|
"core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "allocator-api2"
|
||||||
|
version = "0.2.18"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "cfg-if"
|
||||||
|
version = "1.0.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
||||||
|
dependencies = [
|
||||||
|
"compiler_builtins",
|
||||||
|
"rustc-std-workspace-core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "compiler_builtins"
|
||||||
|
version = "0.1.109"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "f11973008a8cf741fe6d22f339eba21fd0ca81e2760a769ba8243ed6c21edd7e"
|
||||||
|
dependencies = [
|
||||||
|
"rustc-std-workspace-core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "core"
|
||||||
|
version = "0.0.0"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "dlmalloc"
|
||||||
|
version = "0.2.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "3264b043b8e977326c1ee9e723da2c1f8d09a99df52cacf00b4dbce5ac54414d"
|
||||||
|
dependencies = [
|
||||||
|
"cfg-if",
|
||||||
|
"compiler_builtins",
|
||||||
|
"libc",
|
||||||
|
"rustc-std-workspace-core",
|
||||||
|
"windows-sys",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "fortanix-sgx-abi"
|
||||||
|
version = "0.5.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "57cafc2274c10fab234f176b25903ce17e690fca7597090d50880e047a0389c5"
|
||||||
|
dependencies = [
|
||||||
|
"compiler_builtins",
|
||||||
|
"rustc-std-workspace-core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "getopts"
|
||||||
|
version = "0.2.21"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "14dbbfd5c71d70241ecf9e6f13737f7b5ce823821063188d7e46c41d371eebd5"
|
||||||
|
dependencies = [
|
||||||
|
"rustc-std-workspace-core",
|
||||||
|
"rustc-std-workspace-std",
|
||||||
|
"unicode-width",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "gimli"
|
||||||
|
version = "0.28.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253"
|
||||||
|
dependencies = [
|
||||||
|
"compiler_builtins",
|
||||||
|
"rustc-std-workspace-alloc",
|
||||||
|
"rustc-std-workspace-core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "hashbrown"
|
||||||
|
version = "0.14.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
|
||||||
|
dependencies = [
|
||||||
|
"allocator-api2",
|
||||||
|
"compiler_builtins",
|
||||||
|
"rustc-std-workspace-alloc",
|
||||||
|
"rustc-std-workspace-core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "hermit-abi"
|
||||||
|
version = "0.3.9"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024"
|
||||||
|
dependencies = [
|
||||||
|
"compiler_builtins",
|
||||||
|
"rustc-std-workspace-alloc",
|
||||||
|
"rustc-std-workspace-core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "libc"
|
||||||
|
version = "0.2.153"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
|
||||||
|
dependencies = [
|
||||||
|
"rustc-std-workspace-core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "memchr"
|
||||||
|
version = "2.7.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d"
|
||||||
|
dependencies = [
|
||||||
|
"compiler_builtins",
|
||||||
|
"rustc-std-workspace-core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "miniz_oxide"
|
||||||
|
version = "0.7.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7"
|
||||||
|
dependencies = [
|
||||||
|
"adler",
|
||||||
|
"compiler_builtins",
|
||||||
|
"rustc-std-workspace-alloc",
|
||||||
|
"rustc-std-workspace-core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "object"
|
||||||
|
version = "0.32.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441"
|
||||||
|
dependencies = [
|
||||||
|
"compiler_builtins",
|
||||||
|
"memchr",
|
||||||
|
"rustc-std-workspace-alloc",
|
||||||
|
"rustc-std-workspace-core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "panic_abort"
|
||||||
|
version = "0.0.0"
|
||||||
|
dependencies = [
|
||||||
|
"alloc",
|
||||||
|
"cfg-if",
|
||||||
|
"compiler_builtins",
|
||||||
|
"core",
|
||||||
|
"libc",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "panic_unwind"
|
||||||
|
version = "0.0.0"
|
||||||
|
dependencies = [
|
||||||
|
"alloc",
|
||||||
|
"cfg-if",
|
||||||
|
"compiler_builtins",
|
||||||
|
"core",
|
||||||
|
"libc",
|
||||||
|
"unwind",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "proc_macro"
|
||||||
|
version = "0.0.0"
|
||||||
|
dependencies = [
|
||||||
|
"core",
|
||||||
|
"std",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "r-efi"
|
||||||
|
version = "4.4.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "c47196f636c4cc0634b73b0405323d177753c2e15e866952c64ea22902567a34"
|
||||||
|
dependencies = [
|
||||||
|
"compiler_builtins",
|
||||||
|
"rustc-std-workspace-core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "r-efi-alloc"
|
||||||
|
version = "1.0.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "31d6f09fe2b6ad044bc3d2c34ce4979796581afd2f1ebc185837e02421e02fd7"
|
||||||
|
dependencies = [
|
||||||
|
"compiler_builtins",
|
||||||
|
"r-efi",
|
||||||
|
"rustc-std-workspace-core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "rustc-demangle"
|
||||||
|
version = "0.1.23"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"
|
||||||
|
dependencies = [
|
||||||
|
"compiler_builtins",
|
||||||
|
"rustc-std-workspace-core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "rustc-std-workspace-alloc"
|
||||||
|
version = "1.99.0"
|
||||||
|
dependencies = [
|
||||||
|
"alloc",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "rustc-std-workspace-core"
|
||||||
|
version = "1.99.0"
|
||||||
|
dependencies = [
|
||||||
|
"core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "rustc-std-workspace-std"
|
||||||
|
version = "1.99.0"
|
||||||
|
dependencies = [
|
||||||
|
"std",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "std"
|
||||||
|
version = "0.0.0"
|
||||||
|
dependencies = [
|
||||||
|
"addr2line",
|
||||||
|
"alloc",
|
||||||
|
"cfg-if",
|
||||||
|
"compiler_builtins",
|
||||||
|
"core",
|
||||||
|
"dlmalloc",
|
||||||
|
"fortanix-sgx-abi",
|
||||||
|
"hashbrown",
|
||||||
|
"hermit-abi",
|
||||||
|
"libc",
|
||||||
|
"miniz_oxide",
|
||||||
|
"object",
|
||||||
|
"panic_abort",
|
||||||
|
"panic_unwind",
|
||||||
|
"r-efi",
|
||||||
|
"r-efi-alloc",
|
||||||
|
"rustc-demangle",
|
||||||
|
"std_detect",
|
||||||
|
"unwind",
|
||||||
|
"wasi",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "std_detect"
|
||||||
|
version = "0.1.5"
|
||||||
|
dependencies = [
|
||||||
|
"cfg-if",
|
||||||
|
"compiler_builtins",
|
||||||
|
"rustc-std-workspace-alloc",
|
||||||
|
"rustc-std-workspace-core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "sysroot"
|
||||||
|
version = "0.0.0"
|
||||||
|
dependencies = [
|
||||||
|
"alloc",
|
||||||
|
"compiler_builtins",
|
||||||
|
"core",
|
||||||
|
"proc_macro",
|
||||||
|
"std",
|
||||||
|
"test",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "test"
|
||||||
|
version = "0.0.0"
|
||||||
|
dependencies = [
|
||||||
|
"core",
|
||||||
|
"getopts",
|
||||||
|
"libc",
|
||||||
|
"panic_abort",
|
||||||
|
"panic_unwind",
|
||||||
|
"std",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "unicode-width"
|
||||||
|
version = "0.1.12"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "68f5e5f3158ecfd4b8ff6fe086db7c8467a2dfdac97fe420f2b7c4aa97af66d6"
|
||||||
|
dependencies = [
|
||||||
|
"compiler_builtins",
|
||||||
|
"rustc-std-workspace-core",
|
||||||
|
"rustc-std-workspace-std",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "unwind"
|
||||||
|
version = "0.0.0"
|
||||||
|
dependencies = [
|
||||||
|
"cfg-if",
|
||||||
|
"compiler_builtins",
|
||||||
|
"core",
|
||||||
|
"libc",
|
||||||
|
"unwinding",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "unwinding"
|
||||||
|
version = "0.2.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "37a19a21a537f635c16c7576f22d0f2f7d63353c1337ad4ce0d8001c7952a25b"
|
||||||
|
dependencies = [
|
||||||
|
"compiler_builtins",
|
||||||
|
"gimli",
|
||||||
|
"rustc-std-workspace-core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wasi"
|
||||||
|
version = "0.11.0+wasi-snapshot-preview1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
|
||||||
|
dependencies = [
|
||||||
|
"compiler_builtins",
|
||||||
|
"rustc-std-workspace-alloc",
|
||||||
|
"rustc-std-workspace-core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows-sys"
|
||||||
|
version = "0.52.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
|
||||||
|
dependencies = [
|
||||||
|
"windows-targets",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows-targets"
|
||||||
|
version = "0.52.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb"
|
||||||
|
dependencies = [
|
||||||
|
"windows_aarch64_gnullvm",
|
||||||
|
"windows_aarch64_msvc",
|
||||||
|
"windows_i686_gnu",
|
||||||
|
"windows_i686_gnullvm",
|
||||||
|
"windows_i686_msvc",
|
||||||
|
"windows_x86_64_gnu",
|
||||||
|
"windows_x86_64_gnullvm",
|
||||||
|
"windows_x86_64_msvc",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_aarch64_gnullvm"
|
||||||
|
version = "0.52.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_aarch64_msvc"
|
||||||
|
version = "0.52.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_i686_gnu"
|
||||||
|
version = "0.52.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_i686_gnullvm"
|
||||||
|
version = "0.52.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_i686_msvc"
|
||||||
|
version = "0.52.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_x86_64_gnu"
|
||||||
|
version = "0.52.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_x86_64_gnullvm"
|
||||||
|
version = "0.52.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_x86_64_msvc"
|
||||||
|
version = "0.52.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0"
|
|
@ -1,12 +1,14 @@
|
||||||
[package]
|
[package]
|
||||||
authors = ["bjorn3 <bjorn3@users.noreply.github.com>"]
|
authors = ["rustc_codegen_gcc devs"]
|
||||||
name = "sysroot"
|
name = "sysroot"
|
||||||
version = "0.0.0"
|
version = "0.0.0"
|
||||||
resolver = "2"
|
resolver = "2"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
core = { path = "./sysroot_src/library/core" }
|
core = { path = "./sysroot_src/library/core" }
|
||||||
compiler_builtins = "0.1"
|
# TODO: after the sync, revert to using version 0.1.
|
||||||
|
# compiler_builtins = "0.1"
|
||||||
|
compiler_builtins = "=0.1.109"
|
||||||
alloc = { path = "./sysroot_src/library/alloc" }
|
alloc = { path = "./sysroot_src/library/alloc" }
|
||||||
std = { path = "./sysroot_src/library/std", features = ["panic_unwind", "backtrace"] }
|
std = { path = "./sysroot_src/library/std", features = ["panic_unwind", "backtrace"] }
|
||||||
test = { path = "./sysroot_src/library/test" }
|
test = { path = "./sysroot_src/library/test" }
|
||||||
|
@ -18,5 +20,5 @@ rustc-std-workspace-alloc = { path = "./sysroot_src/library/rustc-std-workspace-
|
||||||
rustc-std-workspace-std = { path = "./sysroot_src/library/rustc-std-workspace-std" }
|
rustc-std-workspace-std = { path = "./sysroot_src/library/rustc-std-workspace-std" }
|
||||||
|
|
||||||
[profile.release]
|
[profile.release]
|
||||||
debug = true
|
debug = "limited"
|
||||||
#lto = "fat" # TODO(antoyo): re-enable when the failing LTO tests regarding proc-macros are fixed.
|
#lto = "fat" # TODO(antoyo): re-enable when the failing LTO tests regarding proc-macros are fixed.
|
|
@ -1,5 +1,7 @@
|
||||||
use crate::config::{Channel, ConfigInfo};
|
use crate::config::{Channel, ConfigInfo};
|
||||||
use crate::utils::{run_command, run_command_with_output_and_env, walk_dir};
|
use crate::utils::{
|
||||||
|
copy_file, create_dir, get_sysroot_dir, run_command, run_command_with_output_and_env, walk_dir,
|
||||||
|
};
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::ffi::OsStr;
|
use std::ffi::OsStr;
|
||||||
use std::fs;
|
use std::fs;
|
||||||
|
@ -9,12 +11,14 @@ use std::path::Path;
|
||||||
struct BuildArg {
|
struct BuildArg {
|
||||||
flags: Vec<String>,
|
flags: Vec<String>,
|
||||||
config_info: ConfigInfo,
|
config_info: ConfigInfo,
|
||||||
|
build_sysroot: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl BuildArg {
|
impl BuildArg {
|
||||||
|
/// Creates a new `BuildArg` instance by parsing command-line arguments.
|
||||||
fn new() -> Result<Option<Self>, String> {
|
fn new() -> Result<Option<Self>, String> {
|
||||||
let mut build_arg = Self::default();
|
let mut build_arg = Self::default();
|
||||||
// We skip binary name and the `build` command.
|
// Skip binary name and the `build` command.
|
||||||
let mut args = std::env::args().skip(2);
|
let mut args = std::env::args().skip(2);
|
||||||
|
|
||||||
while let Some(arg) = args.next() {
|
while let Some(arg) = args.next() {
|
||||||
|
@ -29,6 +33,9 @@ impl BuildArg {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
"--sysroot" => {
|
||||||
|
build_arg.build_sysroot = true;
|
||||||
|
}
|
||||||
"--help" => {
|
"--help" => {
|
||||||
Self::usage();
|
Self::usage();
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
|
@ -48,20 +55,20 @@ impl BuildArg {
|
||||||
r#"
|
r#"
|
||||||
`build` command help:
|
`build` command help:
|
||||||
|
|
||||||
--features [arg] : Add a new feature [arg]"#
|
--features [arg] : Add a new feature [arg]
|
||||||
|
--sysroot : Build with sysroot"#
|
||||||
);
|
);
|
||||||
ConfigInfo::show_usage();
|
ConfigInfo::show_usage();
|
||||||
println!(" --help : Show this help");
|
println!(" --help : Show this help");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn build_sysroot(env: &HashMap<String, String>, config: &ConfigInfo) -> Result<(), String> {
|
fn cleanup_sysroot_previous_build(start_dir: &Path) {
|
||||||
let start_dir = Path::new("build_sysroot");
|
|
||||||
// Cleanup for previous run
|
// Cleanup for previous run
|
||||||
// Clean target dir except for build scripts and incremental cache
|
// Clean target dir except for build scripts and incremental cache
|
||||||
let _ = walk_dir(
|
let _ = walk_dir(
|
||||||
start_dir.join("target"),
|
start_dir.join("target"),
|
||||||
|dir: &Path| {
|
&mut |dir: &Path| {
|
||||||
for top in &["debug", "release"] {
|
for top in &["debug", "release"] {
|
||||||
let _ = fs::remove_dir_all(dir.join(top).join("build"));
|
let _ = fs::remove_dir_all(dir.join(top).join("build"));
|
||||||
let _ = fs::remove_dir_all(dir.join(top).join("deps"));
|
let _ = fs::remove_dir_all(dir.join(top).join("deps"));
|
||||||
|
@ -70,7 +77,7 @@ pub fn build_sysroot(env: &HashMap<String, String>, config: &ConfigInfo) -> Resu
|
||||||
|
|
||||||
let _ = walk_dir(
|
let _ = walk_dir(
|
||||||
dir.join(top),
|
dir.join(top),
|
||||||
|sub_dir: &Path| {
|
&mut |sub_dir: &Path| {
|
||||||
if sub_dir
|
if sub_dir
|
||||||
.file_name()
|
.file_name()
|
||||||
.map(|filename| filename.to_str().unwrap().starts_with("libsysroot"))
|
.map(|filename| filename.to_str().unwrap().starts_with("libsysroot"))
|
||||||
|
@ -80,7 +87,7 @@ pub fn build_sysroot(env: &HashMap<String, String>, config: &ConfigInfo) -> Resu
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
},
|
},
|
||||||
|file: &Path| {
|
&mut |file: &Path| {
|
||||||
if file
|
if file
|
||||||
.file_name()
|
.file_name()
|
||||||
.map(|filename| filename.to_str().unwrap().starts_with("libsysroot"))
|
.map(|filename| filename.to_str().unwrap().starts_with("libsysroot"))
|
||||||
|
@ -90,16 +97,39 @@ pub fn build_sysroot(env: &HashMap<String, String>, config: &ConfigInfo) -> Resu
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
},
|
},
|
||||||
|
false,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
},
|
},
|
||||||
|_| Ok(()),
|
&mut |_| Ok(()),
|
||||||
|
false,
|
||||||
);
|
);
|
||||||
|
|
||||||
let _ = fs::remove_file(start_dir.join("Cargo.lock"));
|
let _ = fs::remove_file(start_dir.join("Cargo.lock"));
|
||||||
let _ = fs::remove_file(start_dir.join("test_target/Cargo.lock"));
|
let _ = fs::remove_file(start_dir.join("test_target/Cargo.lock"));
|
||||||
let _ = fs::remove_dir_all(start_dir.join("sysroot"));
|
let _ = fs::remove_dir_all(start_dir.join("sysroot"));
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn create_build_sysroot_content(start_dir: &Path) -> Result<(), String> {
|
||||||
|
if !start_dir.is_dir() {
|
||||||
|
create_dir(start_dir)?;
|
||||||
|
}
|
||||||
|
copy_file("build_system/build_sysroot/Cargo.toml", &start_dir.join("Cargo.toml"))?;
|
||||||
|
copy_file("build_system/build_sysroot/Cargo.lock", &start_dir.join("Cargo.lock"))?;
|
||||||
|
|
||||||
|
let src_dir = start_dir.join("src");
|
||||||
|
if !src_dir.is_dir() {
|
||||||
|
create_dir(&src_dir)?;
|
||||||
|
}
|
||||||
|
copy_file("build_system/build_sysroot/lib.rs", &start_dir.join("src/lib.rs"))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn build_sysroot(env: &HashMap<String, String>, config: &ConfigInfo) -> Result<(), String> {
|
||||||
|
let start_dir = get_sysroot_dir();
|
||||||
|
|
||||||
|
cleanup_sysroot_previous_build(&start_dir);
|
||||||
|
create_build_sysroot_content(&start_dir)?;
|
||||||
|
|
||||||
// Builds libs
|
// Builds libs
|
||||||
let mut rustflags = env.get("RUSTFLAGS").cloned().unwrap_or_default();
|
let mut rustflags = env.get("RUSTFLAGS").cloned().unwrap_or_default();
|
||||||
|
@ -110,7 +140,6 @@ pub fn build_sysroot(env: &HashMap<String, String>, config: &ConfigInfo) -> Resu
|
||||||
if config.no_default_features {
|
if config.no_default_features {
|
||||||
rustflags.push_str(" -Csymbol-mangling-version=v0");
|
rustflags.push_str(" -Csymbol-mangling-version=v0");
|
||||||
}
|
}
|
||||||
let mut env = env.clone();
|
|
||||||
|
|
||||||
let mut args: Vec<&dyn AsRef<OsStr>> = vec![&"cargo", &"build", &"--target", &config.target];
|
let mut args: Vec<&dyn AsRef<OsStr>> = vec![&"cargo", &"build", &"--target", &config.target];
|
||||||
|
|
||||||
|
@ -127,46 +156,33 @@ pub fn build_sysroot(env: &HashMap<String, String>, config: &ConfigInfo) -> Resu
|
||||||
"debug"
|
"debug"
|
||||||
};
|
};
|
||||||
|
|
||||||
|
if let Ok(cg_rustflags) = std::env::var("CG_RUSTFLAGS") {
|
||||||
|
rustflags.push(' ');
|
||||||
|
rustflags.push_str(&cg_rustflags);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut env = env.clone();
|
||||||
env.insert("RUSTFLAGS".to_string(), rustflags);
|
env.insert("RUSTFLAGS".to_string(), rustflags);
|
||||||
run_command_with_output_and_env(&args, Some(start_dir), Some(&env))?;
|
run_command_with_output_and_env(&args, Some(&start_dir), Some(&env))?;
|
||||||
|
|
||||||
// Copy files to sysroot
|
// Copy files to sysroot
|
||||||
let sysroot_path = start_dir.join(format!("sysroot/lib/rustlib/{}/lib/", config.target_triple));
|
let sysroot_path = start_dir.join(format!("sysroot/lib/rustlib/{}/lib/", config.target_triple));
|
||||||
fs::create_dir_all(&sysroot_path).map_err(|error| {
|
create_dir(&sysroot_path)?;
|
||||||
format!(
|
let mut copier = |dir_to_copy: &Path| {
|
||||||
"Failed to create directory `{}`: {:?}",
|
|
||||||
sysroot_path.display(),
|
|
||||||
error
|
|
||||||
)
|
|
||||||
})?;
|
|
||||||
let copier = |dir_to_copy: &Path| {
|
|
||||||
// FIXME: should not use shell command!
|
// FIXME: should not use shell command!
|
||||||
run_command(&[&"cp", &"-r", &dir_to_copy, &sysroot_path], None).map(|_| ())
|
run_command(&[&"cp", &"-r", &dir_to_copy, &sysroot_path], None).map(|_| ())
|
||||||
};
|
};
|
||||||
walk_dir(
|
walk_dir(
|
||||||
start_dir.join(&format!("target/{}/{}/deps", config.target_triple, channel)),
|
start_dir.join(&format!("target/{}/{}/deps", config.target_triple, channel)),
|
||||||
copier,
|
&mut copier.clone(),
|
||||||
copier,
|
&mut copier,
|
||||||
|
false,
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
// Copy the source files to the sysroot (Rust for Linux needs this).
|
// Copy the source files to the sysroot (Rust for Linux needs this).
|
||||||
let sysroot_src_path = start_dir.join("sysroot/lib/rustlib/src/rust");
|
let sysroot_src_path = start_dir.join("sysroot/lib/rustlib/src/rust");
|
||||||
fs::create_dir_all(&sysroot_src_path).map_err(|error| {
|
create_dir(&sysroot_src_path)?;
|
||||||
format!(
|
run_command(&[&"cp", &"-r", &start_dir.join("sysroot_src/library/"), &sysroot_src_path], None)?;
|
||||||
"Failed to create directory `{}`: {:?}",
|
|
||||||
sysroot_src_path.display(),
|
|
||||||
error
|
|
||||||
)
|
|
||||||
})?;
|
|
||||||
run_command(
|
|
||||||
&[
|
|
||||||
&"cp",
|
|
||||||
&"-r",
|
|
||||||
&start_dir.join("sysroot_src/library/"),
|
|
||||||
&sysroot_src_path,
|
|
||||||
],
|
|
||||||
None,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
@ -174,20 +190,11 @@ pub fn build_sysroot(env: &HashMap<String, String>, config: &ConfigInfo) -> Resu
|
||||||
fn build_codegen(args: &mut BuildArg) -> Result<(), String> {
|
fn build_codegen(args: &mut BuildArg) -> Result<(), String> {
|
||||||
let mut env = HashMap::new();
|
let mut env = HashMap::new();
|
||||||
|
|
||||||
env.insert(
|
env.insert("LD_LIBRARY_PATH".to_string(), args.config_info.gcc_path.clone());
|
||||||
"LD_LIBRARY_PATH".to_string(),
|
env.insert("LIBRARY_PATH".to_string(), args.config_info.gcc_path.clone());
|
||||||
args.config_info.gcc_path.clone(),
|
|
||||||
);
|
|
||||||
env.insert(
|
|
||||||
"LIBRARY_PATH".to_string(),
|
|
||||||
args.config_info.gcc_path.clone(),
|
|
||||||
);
|
|
||||||
|
|
||||||
if args.config_info.no_default_features {
|
if args.config_info.no_default_features {
|
||||||
env.insert(
|
env.insert("RUSTFLAGS".to_string(), "-Csymbol-mangling-version=v0".to_string());
|
||||||
"RUSTFLAGS".to_string(),
|
|
||||||
"-Csymbol-mangling-version=v0".to_string(),
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut command: Vec<&dyn AsRef<OsStr>> = vec![&"cargo", &"rustc"];
|
let mut command: Vec<&dyn AsRef<OsStr>> = vec![&"cargo", &"rustc"];
|
||||||
|
@ -212,18 +219,15 @@ fn build_codegen(args: &mut BuildArg) -> Result<(), String> {
|
||||||
// We voluntarily ignore the error.
|
// We voluntarily ignore the error.
|
||||||
let _ = fs::remove_dir_all("target/out");
|
let _ = fs::remove_dir_all("target/out");
|
||||||
let gccjit_target = "target/out/gccjit";
|
let gccjit_target = "target/out/gccjit";
|
||||||
fs::create_dir_all(gccjit_target).map_err(|error| {
|
create_dir(gccjit_target)?;
|
||||||
format!(
|
if args.build_sysroot {
|
||||||
"Failed to create directory `{}`: {:?}",
|
println!("[BUILD] sysroot");
|
||||||
gccjit_target, error
|
build_sysroot(&env, &args.config_info)?;
|
||||||
)
|
}
|
||||||
})?;
|
|
||||||
|
|
||||||
println!("[BUILD] sysroot");
|
|
||||||
build_sysroot(&env, &args.config_info)?;
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Executes the build process.
|
||||||
pub fn run() -> Result<(), String> {
|
pub fn run() -> Result<(), String> {
|
||||||
let mut args = match BuildArg::new()? {
|
let mut args = match BuildArg::new()? {
|
||||||
Some(args) => args,
|
Some(args) => args,
|
||||||
|
|
|
@ -1,114 +0,0 @@
|
||||||
use crate::config::ConfigInfo;
|
|
||||||
use crate::utils::{
|
|
||||||
get_toolchain, run_command_with_output_and_env_no_err, rustc_toolchain_version_info,
|
|
||||||
rustc_version_info,
|
|
||||||
};
|
|
||||||
|
|
||||||
use std::collections::HashMap;
|
|
||||||
use std::ffi::OsStr;
|
|
||||||
use std::path::PathBuf;
|
|
||||||
|
|
||||||
fn args() -> Result<Option<Vec<String>>, String> {
|
|
||||||
// We skip the binary and the "cargo" option.
|
|
||||||
if let Some("--help") = std::env::args().skip(2).next().as_deref() {
|
|
||||||
usage();
|
|
||||||
return Ok(None);
|
|
||||||
}
|
|
||||||
let args = std::env::args().skip(2).collect::<Vec<_>>();
|
|
||||||
if args.is_empty() {
|
|
||||||
return Err(
|
|
||||||
"Expected at least one argument for `cargo` subcommand, found none".to_string(),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
Ok(Some(args))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn usage() {
|
|
||||||
println!(
|
|
||||||
r#"
|
|
||||||
`cargo` command help:
|
|
||||||
|
|
||||||
[args] : Arguments to be passed to the cargo command
|
|
||||||
--help : Show this help
|
|
||||||
"#
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn run() -> Result<(), String> {
|
|
||||||
let args = match args()? {
|
|
||||||
Some(a) => a,
|
|
||||||
None => return Ok(()),
|
|
||||||
};
|
|
||||||
|
|
||||||
// We first need to go to the original location to ensure that the config setup will go as
|
|
||||||
// expected.
|
|
||||||
let current_dir = std::env::current_dir()
|
|
||||||
.and_then(|path| path.canonicalize())
|
|
||||||
.map_err(|error| format!("Failed to get current directory path: {:?}", error))?;
|
|
||||||
let current_exe = std::env::current_exe()
|
|
||||||
.and_then(|path| path.canonicalize())
|
|
||||||
.map_err(|error| format!("Failed to get current exe path: {:?}", error))?;
|
|
||||||
let mut parent_dir = current_exe
|
|
||||||
.components()
|
|
||||||
.map(|comp| comp.as_os_str())
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
// We run this script from "build_system/target/release/y", so we need to remove these elements.
|
|
||||||
for to_remove in &["y", "release", "target", "build_system"] {
|
|
||||||
if parent_dir
|
|
||||||
.last()
|
|
||||||
.map(|part| part == to_remove)
|
|
||||||
.unwrap_or(false)
|
|
||||||
{
|
|
||||||
parent_dir.pop();
|
|
||||||
} else {
|
|
||||||
return Err(format!(
|
|
||||||
"Build script not executed from `build_system/target/release/y` (in path {})",
|
|
||||||
current_exe.display(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let parent_dir = PathBuf::from(parent_dir.join(&OsStr::new("/")));
|
|
||||||
std::env::set_current_dir(&parent_dir).map_err(|error| {
|
|
||||||
format!(
|
|
||||||
"Failed to go to `{}` folder: {:?}",
|
|
||||||
parent_dir.display(),
|
|
||||||
error
|
|
||||||
)
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let mut env: HashMap<String, String> = std::env::vars().collect();
|
|
||||||
ConfigInfo::default().setup(&mut env, false)?;
|
|
||||||
let toolchain = get_toolchain()?;
|
|
||||||
|
|
||||||
let toolchain_version = rustc_toolchain_version_info(&toolchain)?;
|
|
||||||
let default_version = rustc_version_info(None)?;
|
|
||||||
if toolchain_version != default_version {
|
|
||||||
println!(
|
|
||||||
"rustc_codegen_gcc is built for {} but the default rustc version is {}.",
|
|
||||||
toolchain_version.short, default_version.short,
|
|
||||||
);
|
|
||||||
println!("Using {}.", toolchain_version.short);
|
|
||||||
}
|
|
||||||
|
|
||||||
// We go back to the original folder since we now have set up everything we needed.
|
|
||||||
std::env::set_current_dir(¤t_dir).map_err(|error| {
|
|
||||||
format!(
|
|
||||||
"Failed to go back to `{}` folder: {:?}",
|
|
||||||
current_dir.display(),
|
|
||||||
error
|
|
||||||
)
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let rustflags = env.get("RUSTFLAGS").cloned().unwrap_or_default();
|
|
||||||
env.insert("RUSTDOCFLAGS".to_string(), rustflags);
|
|
||||||
let toolchain = format!("+{}", toolchain);
|
|
||||||
let mut command: Vec<&dyn AsRef<OsStr>> = vec![&"cargo", &toolchain];
|
|
||||||
for arg in &args {
|
|
||||||
command.push(arg);
|
|
||||||
}
|
|
||||||
if run_command_with_output_and_env_no_err(&command, None, Some(&env)).is_err() {
|
|
||||||
std::process::exit(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
|
@ -1,4 +1,4 @@
|
||||||
use crate::utils::{remove_file, run_command};
|
use crate::utils::{get_sysroot_dir, remove_file, run_command};
|
||||||
|
|
||||||
use std::fs::remove_dir_all;
|
use std::fs::remove_dir_all;
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
|
@ -42,11 +42,12 @@ fn usage() {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn clean_all() -> Result<(), String> {
|
fn clean_all() -> Result<(), String> {
|
||||||
|
let build_sysroot = get_sysroot_dir();
|
||||||
let dirs_to_remove = [
|
let dirs_to_remove = [
|
||||||
"target",
|
"target".into(),
|
||||||
"build_sysroot/sysroot",
|
build_sysroot.join("sysroot"),
|
||||||
"build_sysroot/sysroot_src",
|
build_sysroot.join("sysroot_src"),
|
||||||
"build_sysroot/target",
|
build_sysroot.join("target"),
|
||||||
];
|
];
|
||||||
for dir in dirs_to_remove {
|
for dir in dirs_to_remove {
|
||||||
let _ = remove_dir_all(dir);
|
let _ = remove_dir_all(dir);
|
||||||
|
@ -56,10 +57,11 @@ fn clean_all() -> Result<(), String> {
|
||||||
let _ = remove_dir_all(Path::new(crate::BUILD_DIR).join(dir));
|
let _ = remove_dir_all(Path::new(crate::BUILD_DIR).join(dir));
|
||||||
}
|
}
|
||||||
|
|
||||||
let files_to_remove = ["build_sysroot/Cargo.lock", "perf.data", "perf.data.old"];
|
let files_to_remove =
|
||||||
|
[build_sysroot.join("Cargo.lock"), "perf.data".into(), "perf.data.old".into()];
|
||||||
|
|
||||||
for file in files_to_remove {
|
for file in files_to_remove {
|
||||||
let _ = remove_file(file);
|
let _ = remove_file(&file);
|
||||||
}
|
}
|
||||||
|
|
||||||
println!("Successfully ran `clean all`");
|
println!("Successfully ran `clean all`");
|
||||||
|
|
|
@ -1,5 +1,6 @@
 use crate::utils::{
-    create_symlink, get_os_name, run_command_with_output, rustc_version_info, split_args,
+    create_dir, create_symlink, get_os_name, get_sysroot_dir, run_command_with_output,
+    rustc_version_info, split_args,
 };
 use std::collections::HashMap;
 use std::env as std_env;
@ -26,11 +27,7 @@ impl Channel {
 }
 
 fn failed_config_parsing(config_file: &Path, err: &str) -> Result<ConfigFile, String> {
-    Err(format!(
-        "Failed to parse `{}`: {}",
-        config_file.display(),
-        err
-    ))
+    Err(format!("Failed to parse `{}`: {}", config_file.display(), err))
 }
 
 #[derive(Default)]
@ -48,11 +45,7 @@ impl ConfigFile {
             )
         })?;
         let toml = Toml::parse(&content).map_err(|err| {
-            format!(
-                "Error occurred around `{}`: {:?}",
-                &content[err.start..=err.end],
-                err.kind
-            )
+            format!("Error occurred around `{}`: {:?}", &content[err.start..=err.end], err.kind)
         })?;
         let mut config = Self::default();
         for (key, value) in toml.iter() {
@ -181,11 +174,7 @@ impl ConfigInfo {
             },
             "--use-backend" => match args.next() {
                 Some(backend) if !backend.is_empty() => self.backend = Some(backend),
-                _ => {
-                    return Err(
-                        "Expected an argument after `--use-backend`, found nothing".into()
-                    )
-                }
+                _ => return Err("Expected an argument after `--use-backend`, found nothing".into()),
             },
             "--no-default-features" => self.no_default_features = true,
             _ => return Ok(false),
@ -228,20 +217,10 @@ impl ConfigInfo {
 
         let output_dir = output_dir.join(&commit);
         if !output_dir.is_dir() {
-            std::fs::create_dir_all(&output_dir).map_err(|err| {
-                format!(
-                    "failed to create folder `{}`: {:?}",
-                    output_dir.display(),
-                    err,
-                )
-            })?;
+            create_dir(&output_dir)?;
         }
         let output_dir = output_dir.canonicalize().map_err(|err| {
-            format!(
-                "Failed to get absolute path of `{}`: {:?}",
-                output_dir.display(),
-                err
-            )
+            format!("Failed to get absolute path of `{}`: {:?}", output_dir.display(), err)
         })?;
 
         let libgccjit_so_name = "libgccjit.so";
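Several hunks in this file (and in the test runner further down) replace an inline `std::fs::create_dir_all(...).map_err(...)` pattern with a single `create_dir(...)?` call. The helper is defined in `utils.rs` and is not shown in this section; a minimal sketch of what the call sites imply, inferred from the code being removed:

```rust
use std::fs;
use std::path::Path;

// Sketch only: the real `create_dir` lives in the build system's utils module.
// Its behaviour is inferred from the code this diff removes, namely
// `std::fs::create_dir_all` plus a formatted error message.
pub fn create_dir(path: &Path) -> Result<(), String> {
    fs::create_dir_all(path)
        .map_err(|error| format!("failed to create folder `{}`: {:?}", path.display(), error))
}
```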
@ -252,13 +231,7 @@ impl ConfigInfo {
         let tempfile = output_dir.join(&tempfile_name);
         let is_in_ci = std::env::var("GITHUB_ACTIONS").is_ok();
 
-        let url = format!(
-            "https://github.com/antoyo/gcc/releases/download/master-{}/libgccjit.so",
-            commit,
-        );
-
-        println!("Downloading `{}`...", url);
-        download_gccjit(url, &output_dir, tempfile_name, !is_in_ci)?;
+        download_gccjit(&commit, &output_dir, tempfile_name, !is_in_ci)?;
 
         let libgccjit_so = output_dir.join(libgccjit_so_name);
         // If we reach this point, it means the file was correctly downloaded, so let's
@ -275,10 +248,7 @@ impl ConfigInfo {
             println!("Downloaded libgccjit.so version {} successfully!", commit);
             // We need to create a link named `libgccjit.so.0` because that's what the linker is
             // looking for.
-            create_symlink(
-                &libgccjit_so,
-                output_dir.join(&format!("{}.0", libgccjit_so_name)),
-            )?;
+            create_symlink(&libgccjit_so, output_dir.join(&format!("{}.0", libgccjit_so_name)))?;
         }
 
         self.gcc_path = output_dir.display().to_string();
@ -298,10 +268,7 @@ impl ConfigInfo {
             Some(config_file) => config_file.into(),
             None => self.compute_path("config.toml"),
         };
-        let ConfigFile {
-            gcc_path,
-            download_gccjit,
-        } = ConfigFile::new(&config_file)?;
+        let ConfigFile { gcc_path, download_gccjit } = ConfigFile::new(&config_file)?;
 
         if let Some(true) = download_gccjit {
             self.download_gccjit_if_needed()?;
@ -310,10 +277,7 @@ impl ConfigInfo {
         self.gcc_path = match gcc_path {
             Some(path) => path,
             None => {
-                return Err(format!(
-                    "missing `gcc-path` value from `{}`",
-                    config_file.display(),
-                ))
+                return Err(format!("missing `gcc-path` value from `{}`", config_file.display(),))
             }
         };
         Ok(())
@ -393,15 +357,16 @@ impl ConfigInfo {
             .join(&format!("librustc_codegen_gcc.{}", self.dylib_ext))
             .display()
             .to_string();
-        self.sysroot_path = current_dir
-            .join("build_sysroot/sysroot")
-            .display()
-            .to_string();
+        self.sysroot_path =
+            current_dir.join(&get_sysroot_dir()).join("sysroot").display().to_string();
         if let Some(backend) = &self.backend {
+            // This option is only used in the rust compiler testsuite. The sysroot is handled
+            // by its build system directly so no need to set it ourselves.
             rustflags.push(format!("-Zcodegen-backend={}", backend));
         } else {
             rustflags.extend_from_slice(&[
-                "--sysroot".to_string(), self.sysroot_path.clone(),
+                "--sysroot".to_string(),
+                self.sysroot_path.clone(),
                 format!("-Zcodegen-backend={}", self.cg_backend_path),
             ]);
         }
@ -422,13 +387,6 @@ impl ConfigInfo {
             rustflags.push("-Csymbol-mangling-version=v0".to_string());
         }
 
-        rustflags.push("-Cdebuginfo=2".to_string());
-
-        // Since we don't support ThinLTO, disable LTO completely when not trying to do LTO.
-        // TODO(antoyo): remove when we can handle ThinLTO.
-        if !env.contains_key(&"FAT_LTO".to_string()) {
-            rustflags.push("-Clto=off".to_string());
-        }
         // FIXME(antoyo): remove once the atomic shim is gone
         if os_name == "Darwin" {
             rustflags.extend_from_slice(&[
@ -440,10 +398,9 @@ impl ConfigInfo {
         // display metadata load errors
         env.insert("RUSTC_LOG".to_string(), "warn".to_string());
 
-        let sysroot = current_dir.join(&format!(
-            "build_sysroot/sysroot/lib/rustlib/{}/lib",
-            self.target_triple,
-        ));
+        let sysroot = current_dir
+            .join(&get_sysroot_dir())
+            .join(&format!("sysroot/lib/rustlib/{}/lib", self.target_triple));
         let ld_library_path = format!(
             "{target}:{sysroot}:{gcc_path}",
             target = self.cargo_target_dir,
@ -501,11 +458,27 @@ impl ConfigInfo {
 }
 
 fn download_gccjit(
-    url: String,
+    commit: &str,
     output_dir: &Path,
     tempfile_name: String,
     with_progress_bar: bool,
 ) -> Result<(), String> {
+    let url = if std::env::consts::OS == "linux" && std::env::consts::ARCH == "x86_64" {
+        format!("https://github.com/rust-lang/gcc/releases/download/master-{}/libgccjit.so", commit)
+    } else {
+        eprintln!(
+            "\
+Pre-compiled libgccjit.so not available for this os or architecture.
+Please compile it yourself and update the `config.toml` file
+to `download-gccjit = false` and set `gcc-path` to the appropriate directory."
+        );
+        return Err(String::from(
+            "no appropriate pre-compiled libgccjit.so available for download",
+        ));
+    };
+
+    println!("Downloading `{}`...", url);
+
     // Try curl. If that fails and we are on windows, fallback to PowerShell.
     let mut ret = run_command_with_output(
        &[
@ -521,11 +494,7 @@ fn download_gccjit(
             &"--retry",
             &"3",
             &"-SRfL",
-            if with_progress_bar {
-                &"--progress-bar"
-            } else {
-                &"-s"
-            },
+            if with_progress_bar { &"--progress-bar" } else { &"-s" },
             &url.as_str(),
         ],
         Some(&output_dir),
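The reworked `download_gccjit` only knows how to fetch a pre-built `libgccjit.so` for Linux on x86_64; on other platforms it asks the user to point the build system at a locally built GCC through `config.toml`. The exact file layout is not part of this diff, so the snippet below is only an illustrative guess based on the `gcc-path` and `download-gccjit` keys the `ConfigFile` destructuring and the error messages refer to (kept as a Rust string literal to stay in the language of the surrounding code; the path is a placeholder).

```rust
// Illustrative config.toml contents for a machine that cannot use the
// pre-built libgccjit.so; "/path/to/gcc/build/lib" is a placeholder.
const EXAMPLE_CONFIG_TOML: &str = r#"
gcc-path = "/path/to/gcc/build/lib"
download-gccjit = false
"#;
```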
35  compiler/rustc_codegen_gcc/build_system/src/fmt.rs  Normal file
@ -0,0 +1,35 @@
use crate::utils::run_command_with_output;
use std::ffi::OsStr;
use std::path::Path;

fn show_usage() {
    println!(
        r#"
`fmt` command help:

--check : Pass `--check` argument to `cargo fmt` commands
--help : Show this help"#
    );
}

pub fn run() -> Result<(), String> {
    let mut check = false;
    // We skip binary name and the `info` command.
    let mut args = std::env::args().skip(2);
    while let Some(arg) = args.next() {
        match arg.as_str() {
            "--help" => {
                show_usage();
                return Ok(());
            }
            "--check" => check = true,
            _ => return Err(format!("Unknown option {}", arg)),
        }
    }

    let cmd: &[&dyn AsRef<OsStr>] =
        if check { &[&"cargo", &"fmt", &"--check"] } else { &[&"cargo", &"fmt"] };

    run_command_with_output(cmd, Some(&Path::new(".")))?;
    run_command_with_output(cmd, Some(&Path::new("build_system")))
}
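The new `fmt` command builds its command line as a slice of `&dyn AsRef<OsStr>` and hands it to `run_command_with_output`, the same helper the rest of the build system uses. That helper lives in `utils.rs` and is not shown here; the sketch below only illustrates the shape of such a function so the call sites above are easier to read, and is not the project's actual implementation.

```rust
use std::ffi::OsStr;
use std::path::Path;
use std::process::Command;

// Sketch of a run_command_with_output-style helper: spawn the command, wait for
// it, and turn any failure into an Err(String).
fn run_command_with_output(cmd: &[&dyn AsRef<OsStr>], cwd: Option<&Path>) -> Result<(), String> {
    let mut command = Command::new(cmd[0].as_ref());
    command.args(cmd[1..].iter().map(|arg| arg.as_ref()));
    if let Some(cwd) = cwd {
        command.current_dir(cwd);
    }
    let status =
        command.status().map_err(|error| format!("Failed to run command: {:?}", error))?;
    if status.success() {
        Ok(())
    } else {
        Err(format!("Command exited with status {:?}", status.code()))
    }
}

fn main() {
    // Example: equivalent of what fmt::run() does when `--check` is not passed.
    let cmd: &[&dyn AsRef<OsStr>] = &[&"cargo", &"fmt"];
    if let Err(error) = run_command_with_output(cmd, Some(Path::new("."))) {
        eprintln!("{error}");
    }
}
```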
@ -2,12 +2,13 @@ use std::env;
 use std::process;
 
 mod build;
-mod cargo;
 mod clean;
 mod clone_gcc;
 mod config;
+mod fmt;
 mod info;
 mod prepare;
+mod rust_tools;
 mod rustc_info;
 mod test;
 mod utils;
@ -26,16 +27,23 @@ macro_rules! arg_error {
 fn usage() {
     println!(
         "\
-Available commands for build_system:
-
-cargo : Run cargo command
-clean : Run clean command
-prepare : Run prepare command
-build : Run build command
-test : Run test command
-info : Run info command
-clone-gcc : Run clone-gcc command
---help : Show this message"
+rustc_codegen_gcc build system
+
+Usage: build_system [command] [options]
+
+Options:
+--help : Displays this help message.
+
+Commands:
+cargo : Executes a cargo command.
+rustc : Compiles the program using the GCC compiler.
+clean : Cleans the build directory, removing all compiled files and artifacts.
+prepare : Prepares the environment for building, including fetching dependencies and setting up configurations.
+build : Compiles the project.
+test : Runs tests for the project.
+info : Displays information about the build environment and project configuration.
+clone-gcc : Clones the GCC compiler from a specified source.
+fmt : Runs rustfmt"
     );
 }
 
@ -45,8 +53,10 @@ pub enum Command {
     CloneGcc,
     Prepare,
     Build,
+    Rustc,
     Test,
     Info,
+    Fmt,
 }
 
 fn main() {
@ -56,12 +66,14 @@ fn main() {
 
     let command = match env::args().nth(1).as_deref() {
         Some("cargo") => Command::Cargo,
+        Some("rustc") => Command::Rustc,
         Some("clean") => Command::Clean,
         Some("prepare") => Command::Prepare,
         Some("build") => Command::Build,
         Some("test") => Command::Test,
         Some("info") => Command::Info,
         Some("clone-gcc") => Command::CloneGcc,
+        Some("fmt") => Command::Fmt,
         Some("--help") => {
             usage();
             process::exit(0);
@ -75,13 +87,15 @@ fn main() {
     };
 
     if let Err(e) = match command {
-        Command::Cargo => cargo::run(),
+        Command::Cargo => rust_tools::run_cargo(),
+        Command::Rustc => rust_tools::run_rustc(),
         Command::Clean => clean::run(),
         Command::Prepare => prepare::run(),
         Command::Build => build::run(),
         Command::Test => test::run(),
        Command::Info => info::run(),
        Command::CloneGcc => clone_gcc::run(),
+        Command::Fmt => fmt::run(),
     } {
         eprintln!("Command failed to run: {e}");
         process::exit(1);
@ -1,58 +1,58 @@
|
||||||
use crate::rustc_info::get_rustc_path;
|
use crate::rustc_info::get_rustc_path;
|
||||||
use crate::utils::{
|
use crate::utils::{
|
||||||
cargo_install, git_clone_root_dir, remove_file, run_command, run_command_with_output, walk_dir,
|
cargo_install, create_dir, get_sysroot_dir, git_clone_root_dir, remove_file, run_command,
|
||||||
|
run_command_with_output, walk_dir,
|
||||||
};
|
};
|
||||||
|
|
||||||
use std::fs;
|
use std::fs;
|
||||||
use std::path::Path;
|
use std::path::{Path, PathBuf};
|
||||||
|
|
||||||
fn prepare_libcore(
|
fn prepare_libcore(
|
||||||
sysroot_path: &Path,
|
sysroot_path: &Path,
|
||||||
libgccjit12_patches: bool,
|
libgccjit12_patches: bool,
|
||||||
cross_compile: bool,
|
cross_compile: bool,
|
||||||
|
sysroot_source: Option<String>,
|
||||||
) -> Result<(), String> {
|
) -> Result<(), String> {
|
||||||
let rustc_path = match get_rustc_path() {
|
let rustlib_dir: PathBuf;
|
||||||
Some(path) => path,
|
|
||||||
None => return Err("`rustc` path not found".to_string()),
|
|
||||||
};
|
|
||||||
|
|
||||||
let parent = match rustc_path.parent() {
|
if let Some(path) = sysroot_source {
|
||||||
Some(path) => path,
|
rustlib_dir = Path::new(&path)
|
||||||
None => return Err(format!("No parent for `{}`", rustc_path.display())),
|
.canonicalize()
|
||||||
};
|
.map_err(|error| format!("Failed to canonicalize path: {:?}", error))?;
|
||||||
|
if !rustlib_dir.is_dir() {
|
||||||
|
return Err(format!("Custom sysroot path {:?} not found", rustlib_dir));
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
let rustc_path = match get_rustc_path() {
|
||||||
|
Some(path) => path,
|
||||||
|
None => return Err("`rustc` path not found".to_string()),
|
||||||
|
};
|
||||||
|
|
||||||
let rustlib_dir = parent
|
let parent = match rustc_path.parent() {
|
||||||
.join("../lib/rustlib/src/rust")
|
Some(path) => path,
|
||||||
.canonicalize()
|
None => return Err(format!("No parent for `{}`", rustc_path.display())),
|
||||||
.map_err(|error| format!("Failed to canonicalize path: {:?}", error))?;
|
};
|
||||||
if !rustlib_dir.is_dir() {
|
|
||||||
return Err("Please install `rust-src` component".to_string());
|
rustlib_dir = parent
|
||||||
|
.join("../lib/rustlib/src/rust")
|
||||||
|
.canonicalize()
|
||||||
|
.map_err(|error| format!("Failed to canonicalize path: {:?}", error))?;
|
||||||
|
if !rustlib_dir.is_dir() {
|
||||||
|
return Err("Please install `rust-src` component".to_string());
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let sysroot_dir = sysroot_path.join("sysroot_src");
|
let sysroot_dir = sysroot_path.join("sysroot_src");
|
||||||
if sysroot_dir.is_dir() {
|
if sysroot_dir.is_dir() {
|
||||||
if let Err(error) = fs::remove_dir_all(&sysroot_dir) {
|
if let Err(error) = fs::remove_dir_all(&sysroot_dir) {
|
||||||
return Err(format!(
|
return Err(format!("Failed to remove `{}`: {:?}", sysroot_dir.display(), error,));
|
||||||
"Failed to remove `{}`: {:?}",
|
|
||||||
sysroot_dir.display(),
|
|
||||||
error,
|
|
||||||
));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let sysroot_library_dir = sysroot_dir.join("library");
|
let sysroot_library_dir = sysroot_dir.join("library");
|
||||||
fs::create_dir_all(&sysroot_library_dir).map_err(|error| {
|
create_dir(&sysroot_library_dir)?;
|
||||||
format!(
|
|
||||||
"Failed to create folder `{}`: {:?}",
|
|
||||||
sysroot_library_dir.display(),
|
|
||||||
error,
|
|
||||||
)
|
|
||||||
})?;
|
|
||||||
|
|
||||||
run_command(
|
run_command(&[&"cp", &"-r", &rustlib_dir.join("library"), &sysroot_dir], None)?;
|
||||||
&[&"cp", &"-r", &rustlib_dir.join("library"), &sysroot_dir],
|
|
||||||
None,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
println!("[GIT] init (cwd): `{}`", sysroot_dir.display());
|
println!("[GIT] init (cwd): `{}`", sysroot_dir.display());
|
||||||
run_command(&[&"git", &"init"], Some(&sysroot_dir))?;
|
run_command(&[&"git", &"init"], Some(&sysroot_dir))?;
|
||||||
|
@ -63,70 +63,52 @@ fn prepare_libcore(
|
||||||
// This is needed on systems where nothing is configured.
|
// This is needed on systems where nothing is configured.
|
||||||
// git really needs something here, or it will fail.
|
// git really needs something here, or it will fail.
|
||||||
// Even using --author is not enough.
|
// Even using --author is not enough.
|
||||||
run_command(
|
run_command(&[&"git", &"config", &"user.email", &"none@example.com"], Some(&sysroot_dir))?;
|
||||||
&[&"git", &"config", &"user.email", &"none@example.com"],
|
run_command(&[&"git", &"config", &"user.name", &"None"], Some(&sysroot_dir))?;
|
||||||
Some(&sysroot_dir),
|
run_command(&[&"git", &"config", &"core.autocrlf", &"false"], Some(&sysroot_dir))?;
|
||||||
)?;
|
run_command(&[&"git", &"config", &"commit.gpgSign", &"false"], Some(&sysroot_dir))?;
|
||||||
run_command(
|
run_command(&[&"git", &"commit", &"-m", &"Initial commit", &"-q"], Some(&sysroot_dir))?;
|
||||||
&[&"git", &"config", &"user.name", &"None"],
|
|
||||||
Some(&sysroot_dir),
|
|
||||||
)?;
|
|
||||||
run_command(
|
|
||||||
&[&"git", &"config", &"core.autocrlf", &"false"],
|
|
||||||
Some(&sysroot_dir),
|
|
||||||
)?;
|
|
||||||
run_command(
|
|
||||||
&[&"git", &"config", &"commit.gpgSign", &"false"],
|
|
||||||
Some(&sysroot_dir),
|
|
||||||
)?;
|
|
||||||
run_command(
|
|
||||||
&[&"git", &"commit", &"-m", &"Initial commit", &"-q"],
|
|
||||||
Some(&sysroot_dir),
|
|
||||||
)?;
|
|
||||||
|
|
||||||
let mut patches = Vec::new();
|
let mut patches = Vec::new();
|
||||||
walk_dir(
|
walk_dir(
|
||||||
"patches",
|
"patches",
|
||||||
|_| Ok(()),
|
&mut |_| Ok(()),
|
||||||
|file_path: &Path| {
|
&mut |file_path: &Path| {
|
||||||
patches.push(file_path.to_path_buf());
|
patches.push(file_path.to_path_buf());
|
||||||
Ok(())
|
Ok(())
|
||||||
},
|
},
|
||||||
|
false,
|
||||||
)?;
|
)?;
|
||||||
if cross_compile {
|
if cross_compile {
|
||||||
walk_dir(
|
walk_dir(
|
||||||
"patches/cross_patches",
|
"patches/cross_patches",
|
||||||
|_| Ok(()),
|
&mut |_| Ok(()),
|
||||||
|file_path: &Path| {
|
&mut |file_path: &Path| {
|
||||||
patches.push(file_path.to_path_buf());
|
patches.push(file_path.to_path_buf());
|
||||||
Ok(())
|
Ok(())
|
||||||
},
|
},
|
||||||
|
false,
|
||||||
)?;
|
)?;
|
||||||
}
|
}
|
||||||
if libgccjit12_patches {
|
if libgccjit12_patches {
|
||||||
walk_dir(
|
walk_dir(
|
||||||
"patches/libgccjit12",
|
"patches/libgccjit12",
|
||||||
|_| Ok(()),
|
&mut |_| Ok(()),
|
||||||
|file_path: &Path| {
|
&mut |file_path: &Path| {
|
||||||
patches.push(file_path.to_path_buf());
|
patches.push(file_path.to_path_buf());
|
||||||
Ok(())
|
Ok(())
|
||||||
},
|
},
|
||||||
|
false,
|
||||||
)?;
|
)?;
|
||||||
}
|
}
|
||||||
patches.sort();
|
patches.sort();
|
||||||
for file_path in patches {
|
for file_path in patches {
|
||||||
println!("[GIT] apply `{}`", file_path.display());
|
println!("[GIT] apply `{}`", file_path.display());
|
||||||
let path = Path::new("../..").join(file_path);
|
let path = Path::new("../../..").join(file_path);
|
||||||
run_command_with_output(&[&"git", &"apply", &path], Some(&sysroot_dir))?;
|
run_command_with_output(&[&"git", &"apply", &path], Some(&sysroot_dir))?;
|
||||||
run_command_with_output(&[&"git", &"add", &"-A"], Some(&sysroot_dir))?;
|
run_command_with_output(&[&"git", &"add", &"-A"], Some(&sysroot_dir))?;
|
||||||
run_command_with_output(
|
run_command_with_output(
|
||||||
&[
|
&[&"git", &"commit", &"--no-gpg-sign", &"-m", &format!("Patch {}", path.display())],
|
||||||
&"git",
|
|
||||||
&"commit",
|
|
||||||
&"--no-gpg-sign",
|
|
||||||
&"-m",
|
|
||||||
&format!("Patch {}", path.display()),
|
|
||||||
],
|
|
||||||
Some(&sysroot_dir),
|
Some(&sysroot_dir),
|
||||||
)?;
|
)?;
|
||||||
}
|
}
|
||||||
|
@ -145,13 +127,7 @@ fn prepare_rand() -> Result<(), String> {
|
||||||
run_command_with_output(&[&"git", &"apply", &path], Some(rand_dir))?;
|
run_command_with_output(&[&"git", &"apply", &path], Some(rand_dir))?;
|
||||||
run_command_with_output(&[&"git", &"add", &"-A"], Some(rand_dir))?;
|
run_command_with_output(&[&"git", &"add", &"-A"], Some(rand_dir))?;
|
||||||
run_command_with_output(
|
run_command_with_output(
|
||||||
&[
|
&[&"git", &"commit", &"--no-gpg-sign", &"-m", &format!("Patch {}", path.display())],
|
||||||
&"git",
|
|
||||||
&"commit",
|
|
||||||
&"--no-gpg-sign",
|
|
||||||
&"-m",
|
|
||||||
&format!("Patch {}", path.display()),
|
|
||||||
],
|
|
||||||
Some(rand_dir),
|
Some(rand_dir),
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
|
@ -165,10 +141,7 @@ fn build_raytracer(repo_dir: &Path) -> Result<(), String> {
|
||||||
if mv_target.is_file() {
|
if mv_target.is_file() {
|
||||||
remove_file(&mv_target)?;
|
remove_file(&mv_target)?;
|
||||||
}
|
}
|
||||||
run_command(
|
run_command(&[&"mv", &"target/debug/main", &"raytracer_cg_llvm"], Some(repo_dir))?;
|
||||||
&[&"mv", &"target/debug/main", &"raytracer_cg_llvm"],
|
|
||||||
Some(repo_dir),
|
|
||||||
)?;
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -193,6 +166,7 @@ struct PrepareArg {
|
||||||
cross_compile: bool,
|
cross_compile: bool,
|
||||||
only_libcore: bool,
|
only_libcore: bool,
|
||||||
libgccjit12_patches: bool,
|
libgccjit12_patches: bool,
|
||||||
|
sysroot_source: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PrepareArg {
|
impl PrepareArg {
|
||||||
|
@ -200,12 +174,23 @@ impl PrepareArg {
         let mut only_libcore = false;
         let mut cross_compile = false;
         let mut libgccjit12_patches = false;
+        let mut sysroot_source = None;
 
-        for arg in std::env::args().skip(2) {
+        let mut args = std::env::args().skip(2);
+        while let Some(arg) = args.next() {
             match arg.as_str() {
                 "--only-libcore" => only_libcore = true,
                 "--cross" => cross_compile = true,
                 "--libgccjit12-patches" => libgccjit12_patches = true,
+                "--sysroot-source" => {
+                    if let Some(path) = args.next() {
+                        sysroot_source = Some(path);
+                    } else {
+                        return Err(
+                            "Expected a value after `--sysroot-source`, found nothing".to_string()
+                        );
+                    }
+                }
                 "--help" => {
                     Self::usage();
                     return Ok(None);
@ -213,11 +198,7 @@ impl PrepareArg {
                 a => return Err(format!("Unknown argument `{a}`")),
             }
         }
-        Ok(Some(Self {
-            cross_compile,
-            only_libcore,
-            libgccjit12_patches,
-        }))
+        Ok(Some(Self { cross_compile, only_libcore, libgccjit12_patches, sysroot_source }))
     }
 
     fn usage() {
@ -228,6 +209,7 @@ impl PrepareArg {
 --only-libcore : Only setup libcore and don't clone other repositories
 --cross : Apply the patches needed to do cross-compilation
 --libgccjit12-patches : Apply patches needed for libgccjit12
+--sysroot-source : Specify custom path for sysroot source
 --help : Show this help"#
         )
     }
@ -238,8 +220,13 @@ pub fn run() -> Result<(), String> {
         Some(a) => a,
         None => return Ok(()),
     };
-    let sysroot_path = Path::new("build_sysroot");
-    prepare_libcore(sysroot_path, args.libgccjit12_patches, args.cross_compile)?;
+    let sysroot_path = get_sysroot_dir();
+    prepare_libcore(
+        &sysroot_path,
+        args.libgccjit12_patches,
+        args.cross_compile,
+        args.sysroot_source,
+    )?;
 
     if !args.only_libcore {
         cargo_install("hyperfine")?;
125  compiler/rustc_codegen_gcc/build_system/src/rust_tools.rs  Normal file
@ -0,0 +1,125 @@
use crate::config::ConfigInfo;
use crate::utils::{
    get_toolchain, run_command_with_output_and_env_no_err, rustc_toolchain_version_info,
    rustc_version_info,
};

use std::collections::HashMap;
use std::ffi::OsStr;
use std::path::PathBuf;

fn args(command: &str) -> Result<Option<Vec<String>>, String> {
    // We skip the binary and the "cargo"/"rustc" option.
    if let Some("--help") = std::env::args().skip(2).next().as_deref() {
        usage(command);
        return Ok(None);
    }
    let args = std::env::args().skip(2).collect::<Vec<_>>();
    if args.is_empty() {
        return Err(format!(
            "Expected at least one argument for `{}` subcommand, found none",
            command
        ));
    }
    Ok(Some(args))
}

fn usage(command: &str) {
    println!(
        r#"
`{}` command help:

[args] : Arguments to be passed to the cargo command
--help : Show this help
"#,
        command,
    )
}

struct RustcTools {
    env: HashMap<String, String>,
    args: Vec<String>,
    toolchain: String,
    config: ConfigInfo,
}

impl RustcTools {
    fn new(command: &str) -> Result<Option<Self>, String> {
        let Some(args) = args(command)? else { return Ok(None) };

        // We first need to go to the original location to ensure that the config setup will go as
        // expected.
        let current_dir = std::env::current_dir()
            .and_then(|path| path.canonicalize())
            .map_err(|error| format!("Failed to get current directory path: {:?}", error))?;
        let current_exe = std::env::current_exe()
            .and_then(|path| path.canonicalize())
            .map_err(|error| format!("Failed to get current exe path: {:?}", error))?;
        let mut parent_dir =
            current_exe.components().map(|comp| comp.as_os_str()).collect::<Vec<_>>();
        // We run this script from "build_system/target/release/y", so we need to remove these elements.
        for to_remove in &["y", "release", "target", "build_system"] {
            if parent_dir.last().map(|part| part == to_remove).unwrap_or(false) {
                parent_dir.pop();
            } else {
                return Err(format!(
                    "Build script not executed from `build_system/target/release/y` (in path {})",
                    current_exe.display(),
                ));
            }
        }
        let parent_dir = PathBuf::from(parent_dir.join(&OsStr::new("/")));
        std::env::set_current_dir(&parent_dir).map_err(|error| {
            format!("Failed to go to `{}` folder: {:?}", parent_dir.display(), error)
        })?;

        let mut env: HashMap<String, String> = std::env::vars().collect();
        let mut config = ConfigInfo::default();
        config.setup(&mut env, false)?;
        let toolchain = get_toolchain()?;

        let toolchain_version = rustc_toolchain_version_info(&toolchain)?;
        let default_version = rustc_version_info(None)?;
        if toolchain_version != default_version {
            println!(
                "rustc_codegen_gcc is built for {} but the default rustc version is {}.",
                toolchain_version.short, default_version.short,
            );
            println!("Using {}.", toolchain_version.short);
        }

        // We go back to the original folder since we now have set up everything we needed.
        std::env::set_current_dir(&current_dir).map_err(|error| {
            format!("Failed to go back to `{}` folder: {:?}", current_dir.display(), error)
        })?;
        let toolchain = format!("+{}", toolchain);
        Ok(Some(Self { toolchain, args, env, config }))
    }
}

pub fn run_cargo() -> Result<(), String> {
    let Some(mut tools) = RustcTools::new("cargo")? else { return Ok(()) };
    let rustflags = tools.env.get("RUSTFLAGS").cloned().unwrap_or_default();
    tools.env.insert("RUSTDOCFLAGS".to_string(), rustflags);
    let mut command: Vec<&dyn AsRef<OsStr>> = vec![&"cargo", &tools.toolchain];
    for arg in &tools.args {
        command.push(arg);
    }
    if run_command_with_output_and_env_no_err(&command, None, Some(&tools.env)).is_err() {
        std::process::exit(1);
    }

    Ok(())
}

pub fn run_rustc() -> Result<(), String> {
    let Some(tools) = RustcTools::new("rustc")? else { return Ok(()) };
    let mut command = tools.config.rustc_command_vec();
    for arg in &tools.args {
        command.push(arg);
    }
    if run_command_with_output_and_env_no_err(&command, None, Some(&tools.env)).is_err() {
        std::process::exit(1);
    }
    Ok(())
}
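`RustcTools::new` has to find the repository root before it can load the configuration, and it does so by peeling the known `build_system/target/release/y` suffix off the path of the running binary. A small worked example of that component-popping logic (the `/repo` path is purely illustrative):

```rust
use std::ffi::OsStr;
use std::path::PathBuf;

fn main() {
    // Illustrative location of the `y` binary inside a checkout at /repo.
    let current_exe = PathBuf::from("/repo/build_system/target/release/y");
    let mut parent_dir =
        current_exe.components().map(|comp| comp.as_os_str()).collect::<Vec<_>>();
    // The binary is expected to live in build_system/target/release/y, so these
    // four trailing components are stripped one by one.
    for to_remove in &["y", "release", "target", "build_system"] {
        assert_eq!(parent_dir.pop(), Some(OsStr::new(to_remove)));
    }
    // What remains is the root component plus the repository directory, which
    // RustcTools::new then re-joins into the path it changes into.
    assert_eq!(parent_dir, [OsStr::new("/"), OsStr::new("repo")]);
}
```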
@ -1,13 +1,14 @@
|
||||||
use crate::build;
|
use crate::build;
|
||||||
use crate::config::{Channel, ConfigInfo};
|
use crate::config::{Channel, ConfigInfo};
|
||||||
use crate::utils::{
|
use crate::utils::{
|
||||||
get_toolchain, git_clone, git_clone_root_dir, remove_file, run_command, run_command_with_env,
|
create_dir, get_sysroot_dir, get_toolchain, git_clone, git_clone_root_dir, remove_file,
|
||||||
run_command_with_output_and_env, rustc_version_info, split_args, walk_dir,
|
run_command, run_command_with_env, run_command_with_output_and_env, rustc_version_info,
|
||||||
|
split_args, walk_dir,
|
||||||
};
|
};
|
||||||
|
|
||||||
use std::collections::{BTreeSet, HashMap};
|
use std::collections::HashMap;
|
||||||
use std::ffi::OsStr;
|
use std::ffi::OsStr;
|
||||||
use std::fs::{create_dir_all, remove_dir_all, File};
|
use std::fs::{remove_dir_all, File};
|
||||||
use std::io::{BufRead, BufReader};
|
use std::io::{BufRead, BufReader};
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
use std::str::FromStr;
|
use std::str::FromStr;
|
||||||
|
@ -19,46 +20,27 @@ type Runners = HashMap<&'static str, (&'static str, Runner)>;
|
||||||
fn get_runners() -> Runners {
|
fn get_runners() -> Runners {
|
||||||
let mut runners = HashMap::new();
|
let mut runners = HashMap::new();
|
||||||
|
|
||||||
|
runners.insert("--test-rustc", ("Run all rustc tests", test_rustc as Runner));
|
||||||
|
runners
|
||||||
|
.insert("--test-successful-rustc", ("Run successful rustc tests", test_successful_rustc));
|
||||||
runners.insert(
|
runners.insert(
|
||||||
"--test-rustc",
|
"--test-failing-ui-pattern-tests",
|
||||||
("Run all rustc tests", test_rustc as Runner),
|
("Run failing ui pattern tests", test_failing_ui_pattern_tests),
|
||||||
);
|
|
||||||
runners.insert(
|
|
||||||
"--test-successful-rustc",
|
|
||||||
("Run successful rustc tests", test_successful_rustc),
|
|
||||||
);
|
|
||||||
runners.insert(
|
|
||||||
"--test-failing-rustc",
|
|
||||||
("Run failing rustc tests", test_failing_rustc),
|
|
||||||
);
|
|
||||||
runners.insert(
|
|
||||||
"--projects",
|
|
||||||
("Run the tests of popular crates", test_projects),
|
|
||||||
);
|
);
|
||||||
|
runners.insert("--test-failing-rustc", ("Run failing rustc tests", test_failing_rustc));
|
||||||
|
runners.insert("--projects", ("Run the tests of popular crates", test_projects));
|
||||||
runners.insert("--test-libcore", ("Run libcore tests", test_libcore));
|
runners.insert("--test-libcore", ("Run libcore tests", test_libcore));
|
||||||
runners.insert("--clean", ("Empty cargo target directory", clean));
|
runners.insert("--clean", ("Empty cargo target directory", clean));
|
||||||
runners.insert("--build-sysroot", ("Build sysroot", build_sysroot));
|
runners.insert("--build-sysroot", ("Build sysroot", build_sysroot));
|
||||||
runners.insert("--std-tests", ("Run std tests", std_tests));
|
runners.insert("--std-tests", ("Run std tests", std_tests));
|
||||||
runners.insert("--asm-tests", ("Run asm tests", asm_tests));
|
runners.insert("--asm-tests", ("Run asm tests", asm_tests));
|
||||||
runners.insert(
|
runners.insert("--extended-tests", ("Run extended sysroot tests", extended_sysroot_tests));
|
||||||
"--extended-tests",
|
runners.insert("--extended-rand-tests", ("Run extended rand tests", extended_rand_tests));
|
||||||
("Run extended sysroot tests", extended_sysroot_tests),
|
|
||||||
);
|
|
||||||
runners.insert(
|
|
||||||
"--extended-rand-tests",
|
|
||||||
("Run extended rand tests", extended_rand_tests),
|
|
||||||
);
|
|
||||||
runners.insert(
|
runners.insert(
|
||||||
"--extended-regex-example-tests",
|
"--extended-regex-example-tests",
|
||||||
(
|
("Run extended regex example tests", extended_regex_example_tests),
|
||||||
"Run extended regex example tests",
|
|
||||||
extended_regex_example_tests,
|
|
||||||
),
|
|
||||||
);
|
|
||||||
runners.insert(
|
|
||||||
"--extended-regex-tests",
|
|
||||||
("Run extended regex tests", extended_regex_tests),
|
|
||||||
);
|
);
|
||||||
|
runners.insert("--extended-regex-tests", ("Run extended regex tests", extended_regex_tests));
|
||||||
runners.insert("--mini-tests", ("Run mini tests", mini_tests));
|
runners.insert("--mini-tests", ("Run mini tests", mini_tests));
|
||||||
|
|
||||||
runners
|
runners
|
||||||
|
@ -71,15 +53,9 @@ fn get_number_after_arg(
|
||||||
match args.next() {
|
match args.next() {
|
||||||
Some(nb) if !nb.is_empty() => match usize::from_str(&nb) {
|
Some(nb) if !nb.is_empty() => match usize::from_str(&nb) {
|
||||||
Ok(nb) => Ok(nb),
|
Ok(nb) => Ok(nb),
|
||||||
Err(_) => Err(format!(
|
Err(_) => Err(format!("Expected a number after `{}`, found `{}`", option, nb)),
|
||||||
"Expected a number after `{}`, found `{}`",
|
|
||||||
option, nb
|
|
||||||
)),
|
|
||||||
},
|
},
|
||||||
_ => Err(format!(
|
_ => Err(format!("Expected a number after `{}`, found nothing", option)),
|
||||||
"Expected a number after `{}`, found nothing",
|
|
||||||
option
|
|
||||||
)),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -110,7 +86,7 @@ fn show_usage() {
|
||||||
struct TestArg {
|
struct TestArg {
|
||||||
build_only: bool,
|
build_only: bool,
|
||||||
use_system_gcc: bool,
|
use_system_gcc: bool,
|
||||||
runners: BTreeSet<String>,
|
runners: Vec<String>,
|
||||||
flags: Vec<String>,
|
flags: Vec<String>,
|
||||||
nb_parts: Option<usize>,
|
nb_parts: Option<usize>,
|
||||||
current_part: Option<usize>,
|
current_part: Option<usize>,
|
||||||
|
@ -130,9 +106,7 @@ impl TestArg {
|
||||||
match arg.as_str() {
|
match arg.as_str() {
|
||||||
"--features" => match args.next() {
|
"--features" => match args.next() {
|
||||||
Some(feature) if !feature.is_empty() => {
|
Some(feature) if !feature.is_empty() => {
|
||||||
test_arg
|
test_arg.flags.extend_from_slice(&["--features".into(), feature]);
|
||||||
.flags
|
|
||||||
.extend_from_slice(&["--features".into(), feature]);
|
|
||||||
}
|
}
|
||||||
_ => {
|
_ => {
|
||||||
return Err("Expected an argument after `--features`, found nothing".into())
|
return Err("Expected an argument after `--features`, found nothing".into())
|
||||||
|
@ -157,8 +131,10 @@ impl TestArg {
|
||||||
show_usage();
|
show_usage();
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
}
|
}
|
||||||
x if runners.contains_key(x) => {
|
x if runners.contains_key(x)
|
||||||
test_arg.runners.insert(x.into());
|
&& !test_arg.runners.iter().any(|runner| runner == x) =>
|
||||||
|
{
|
||||||
|
test_arg.runners.push(x.into());
|
||||||
}
|
}
|
||||||
arg => {
|
arg => {
|
||||||
if !test_arg.config_info.parse_argument(arg, &mut args)? {
|
if !test_arg.config_info.parse_argument(arg, &mut args)? {
|
||||||
|
@ -211,8 +187,7 @@ fn build_if_no_backend(env: &Env, args: &TestArg) -> Result<(), String> {
|
||||||
fn clean(_env: &Env, args: &TestArg) -> Result<(), String> {
|
fn clean(_env: &Env, args: &TestArg) -> Result<(), String> {
|
||||||
let _ = std::fs::remove_dir_all(&args.config_info.cargo_target_dir);
|
let _ = std::fs::remove_dir_all(&args.config_info.cargo_target_dir);
|
||||||
let path = Path::new(&args.config_info.cargo_target_dir).join("gccjit");
|
let path = Path::new(&args.config_info.cargo_target_dir).join("gccjit");
|
||||||
std::fs::create_dir_all(&path)
|
create_dir(&path)
|
||||||
.map_err(|error| format!("failed to create folder `{}`: {:?}", path.display(), error))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn mini_tests(env: &Env, args: &TestArg) -> Result<(), String> {
|
fn mini_tests(env: &Env, args: &TestArg) -> Result<(), String> {
|
||||||
|
@ -304,13 +279,8 @@ fn maybe_run_command_in_vm(
|
||||||
let sudo_command: &[&dyn AsRef<OsStr>] = &[&"sudo", &"cp", &exe, &vm_exe_path];
|
let sudo_command: &[&dyn AsRef<OsStr>] = &[&"sudo", &"cp", &exe, &vm_exe_path];
|
||||||
run_command_with_env(sudo_command, None, Some(env))?;
|
run_command_with_env(sudo_command, None, Some(env))?;
|
||||||
|
|
||||||
let mut vm_command: Vec<&dyn AsRef<OsStr>> = vec![
|
let mut vm_command: Vec<&dyn AsRef<OsStr>> =
|
||||||
&"sudo",
|
vec![&"sudo", &"chroot", &vm_dir, &"qemu-m68k-static", &inside_vm_exe_path];
|
||||||
&"chroot",
|
|
||||||
&vm_dir,
|
|
||||||
&"qemu-m68k-static",
|
|
||||||
&inside_vm_exe_path,
|
|
||||||
];
|
|
||||||
vm_command.extend_from_slice(command);
|
vm_command.extend_from_slice(command);
|
||||||
run_command_with_output_and_env(&vm_command, Some(&vm_parent_dir), Some(env))?;
|
run_command_with_output_and_env(&vm_command, Some(&vm_parent_dir), Some(env))?;
|
||||||
Ok(())
|
Ok(())
|
||||||
|
@ -399,11 +369,7 @@ fn std_tests(env: &Env, args: &TestArg) -> Result<(), String> {
|
||||||
}
|
}
|
||||||
run_command_with_env(&command, None, Some(env))?;
|
run_command_with_env(&command, None, Some(env))?;
|
||||||
maybe_run_command_in_vm(
|
maybe_run_command_in_vm(
|
||||||
&[
|
&[&cargo_target_dir.join("std_example"), &"--target", &args.config_info.target_triple],
|
||||||
&cargo_target_dir.join("std_example"),
|
|
||||||
&"--target",
|
|
||||||
&args.config_info.target_triple,
|
|
||||||
],
|
|
||||||
env,
|
env,
|
||||||
args,
|
args,
|
||||||
)?;
|
)?;
|
||||||
|
@ -427,11 +393,7 @@ fn std_tests(env: &Env, args: &TestArg) -> Result<(), String> {
|
||||||
command.push(test_flag);
|
command.push(test_flag);
|
||||||
}
|
}
|
||||||
run_command_with_env(&command, None, Some(env))?;
|
run_command_with_env(&command, None, Some(env))?;
|
||||||
maybe_run_command_in_vm(
|
maybe_run_command_in_vm(&[&cargo_target_dir.join("subslice-patterns-const-eval")], env, args)?;
|
||||||
&[&cargo_target_dir.join("subslice-patterns-const-eval")],
|
|
||||||
env,
|
|
||||||
args,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
// FIXME: create a function "display_if_not_quiet" or something along the line.
|
// FIXME: create a function "display_if_not_quiet" or something along the line.
|
||||||
println!("[AOT] track-caller-attribute");
|
println!("[AOT] track-caller-attribute");
|
||||||
|
@ -447,11 +409,7 @@ fn std_tests(env: &Env, args: &TestArg) -> Result<(), String> {
|
||||||
command.push(test_flag);
|
command.push(test_flag);
|
||||||
}
|
}
|
||||||
run_command_with_env(&command, None, Some(env))?;
|
run_command_with_env(&command, None, Some(env))?;
|
||||||
maybe_run_command_in_vm(
|
maybe_run_command_in_vm(&[&cargo_target_dir.join("track-caller-attribute")], env, args)?;
|
||||||
&[&cargo_target_dir.join("track-caller-attribute")],
|
|
||||||
env,
|
|
||||||
args,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
// FIXME: create a function "display_if_not_quiet" or something along the line.
|
// FIXME: create a function "display_if_not_quiet" or something along the line.
|
||||||
println!("[AOT] mod_bench");
|
println!("[AOT] mod_bench");
|
||||||
|
@ -477,11 +435,7 @@ fn setup_rustc(env: &mut Env, args: &TestArg) -> Result<PathBuf, String> {
|
||||||
);
|
);
|
||||||
let rust_dir_path = Path::new(crate::BUILD_DIR).join("rust");
|
let rust_dir_path = Path::new(crate::BUILD_DIR).join("rust");
|
||||||
// If the repository was already cloned, command will fail, so doesn't matter.
|
// If the repository was already cloned, command will fail, so doesn't matter.
|
||||||
let _ = git_clone(
|
let _ = git_clone("https://github.com/rust-lang/rust.git", Some(&rust_dir_path), false);
|
||||||
"https://github.com/rust-lang/rust.git",
|
|
||||||
Some(&rust_dir_path),
|
|
||||||
false,
|
|
||||||
);
|
|
||||||
let rust_dir: Option<&Path> = Some(&rust_dir_path);
|
let rust_dir: Option<&Path> = Some(&rust_dir_path);
|
||||||
run_command(&[&"git", &"checkout", &"--", &"tests/"], rust_dir)?;
|
run_command(&[&"git", &"checkout", &"--", &"tests/"], rust_dir)?;
|
||||||
run_command_with_output_and_env(&[&"git", &"fetch"], rust_dir, Some(env))?;
|
run_command_with_output_and_env(&[&"git", &"fetch"], rust_dir, Some(env))?;
|
||||||
|
@ -511,12 +465,8 @@ fn setup_rustc(env: &mut Env, args: &TestArg) -> Result<PathBuf, String> {
|
||||||
}
|
}
|
||||||
})?;
|
})?;
|
||||||
let rustc = String::from_utf8(
|
let rustc = String::from_utf8(
|
||||||
run_command_with_env(
|
run_command_with_env(&[&"rustup", &toolchain, &"which", &"rustc"], rust_dir, Some(env))?
|
||||||
&[&"rustup", &toolchain, &"which", &"rustc"],
|
.stdout,
|
||||||
rust_dir,
|
|
||||||
Some(env),
|
|
||||||
)?
|
|
||||||
.stdout,
|
|
||||||
)
|
)
|
||||||
.map_err(|error| format!("Failed to retrieve rustc path: {:?}", error))
|
.map_err(|error| format!("Failed to retrieve rustc path: {:?}", error))
|
||||||
.and_then(|rustc| {
|
.and_then(|rustc| {
|
||||||
|
@ -573,13 +523,7 @@ download-ci-llvm = false
|
||||||
llvm_filecheck = llvm_filecheck.trim(),
|
llvm_filecheck = llvm_filecheck.trim(),
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
.map_err(|error| {
|
.map_err(|error| format!("Failed to write into `{}`: {:?}", file_path.display(), error))?;
|
||||||
format!(
|
|
||||||
"Failed to write into `{}`: {:?}",
|
|
||||||
file_path.display(),
|
|
||||||
error
|
|
||||||
)
|
|
||||||
})?;
|
|
||||||
Ok(rust_dir_path)
|
Ok(rust_dir_path)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -591,21 +535,19 @@ fn asm_tests(env: &Env, args: &TestArg) -> Result<(), String> {
|
||||||
|
|
||||||
env.insert("COMPILETEST_FORCE_STAGE0".to_string(), "1".to_string());
|
env.insert("COMPILETEST_FORCE_STAGE0".to_string(), "1".to_string());
|
||||||
|
|
||||||
let extra = if args.is_using_gcc_master_branch() {
|
let extra =
|
||||||
""
|
if args.is_using_gcc_master_branch() { "" } else { " -Csymbol-mangling-version=v0" };
|
||||||
} else {
|
|
||||||
" -Csymbol-mangling-version=v0"
|
|
||||||
};
|
|
||||||
|
|
||||||
let rustc_args = &format!(
|
let rustc_args = &format!(
|
||||||
r#"-Zpanic-abort-tests \
|
r#"-Zpanic-abort-tests \
|
||||||
-Zcodegen-backend="{pwd}/target/{channel}/librustc_codegen_gcc.{dylib_ext}" \
|
-Zcodegen-backend="{pwd}/target/{channel}/librustc_codegen_gcc.{dylib_ext}" \
|
||||||
--sysroot "{pwd}/build_sysroot/sysroot" -Cpanic=abort{extra}"#,
|
--sysroot "{sysroot_dir}" -Cpanic=abort{extra}"#,
|
||||||
pwd = std::env::current_dir()
|
pwd = std::env::current_dir()
|
||||||
.map_err(|error| format!("`current_dir` failed: {:?}", error))?
|
.map_err(|error| format!("`current_dir` failed: {:?}", error))?
|
||||||
.display(),
|
.display(),
|
||||||
channel = args.config_info.channel.as_str(),
|
channel = args.config_info.channel.as_str(),
|
||||||
dylib_ext = args.config_info.dylib_ext,
|
dylib_ext = args.config_info.dylib_ext,
|
||||||
|
sysroot_dir = args.config_info.sysroot_path,
|
||||||
extra = extra,
|
extra = extra,
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -703,20 +645,23 @@ fn test_projects(env: &Env, args: &TestArg) -> Result<(), String> {
|
||||||
//"https://github.com/rust-lang/cargo", // TODO: very slow, only run on master?
|
//"https://github.com/rust-lang/cargo", // TODO: very slow, only run on master?
|
||||||
];
|
];
|
||||||
|
|
||||||
|
let mut env = env.clone();
|
||||||
|
let rustflags =
|
||||||
|
format!("{} --cap-lints allow", env.get("RUSTFLAGS").cloned().unwrap_or_default());
|
||||||
|
env.insert("RUSTFLAGS".to_string(), rustflags);
|
||||||
let run_tests = |projects_path, iter: &mut dyn Iterator<Item = &&str>| -> Result<(), String> {
|
let run_tests = |projects_path, iter: &mut dyn Iterator<Item = &&str>| -> Result<(), String> {
|
||||||
for project in iter {
|
for project in iter {
|
||||||
let clone_result = git_clone_root_dir(project, projects_path, true)?;
|
let clone_result = git_clone_root_dir(project, projects_path, true)?;
|
||||||
let repo_path = Path::new(&clone_result.repo_dir);
|
let repo_path = Path::new(&clone_result.repo_dir);
|
||||||
run_cargo_command(&[&"build", &"--release"], Some(repo_path), env, args)?;
|
run_cargo_command(&[&"build", &"--release"], Some(repo_path), &env, args)?;
|
||||||
run_cargo_command(&[&"test"], Some(repo_path), env, args)?;
|
run_cargo_command(&[&"test"], Some(repo_path), &env, args)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
};
|
};
|
||||||
|
|
||||||
let projects_path = Path::new("projects");
|
let projects_path = Path::new("projects");
|
||||||
create_dir_all(projects_path)
|
create_dir(projects_path)?;
|
||||||
.map_err(|err| format!("Failed to create directory `projects`: {}", err))?;
|
|
||||||
|
|
||||||
let nb_parts = args.nb_parts.unwrap_or(0);
|
let nb_parts = args.nb_parts.unwrap_or(0);
|
||||||
if nb_parts > 0 {
|
if nb_parts > 0 {
|
||||||
|
@ -737,9 +682,9 @@ fn test_projects(env: &Env, args: &TestArg) -> Result<(), String> {
|
||||||
fn test_libcore(env: &Env, args: &TestArg) -> Result<(), String> {
|
fn test_libcore(env: &Env, args: &TestArg) -> Result<(), String> {
|
||||||
// FIXME: create a function "display_if_not_quiet" or something along the line.
|
// FIXME: create a function "display_if_not_quiet" or something along the line.
|
||||||
println!("[TEST] libcore");
|
println!("[TEST] libcore");
|
||||||
let path = Path::new("build_sysroot/sysroot_src/library/core/tests");
|
let path = get_sysroot_dir().join("sysroot_src/library/core/tests");
|
||||||
let _ = remove_dir_all(path.join("target"));
|
let _ = remove_dir_all(path.join("target"));
|
||||||
run_cargo_command(&[&"test"], Some(path), env, args)?;
|
run_cargo_command(&[&"test"], Some(&path), env, args)?;
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -763,10 +708,8 @@ fn extended_rand_tests(env: &Env, args: &TestArg) -> Result<(), String> {
|
||||||
}
|
}
|
||||||
let mut env = env.clone();
|
let mut env = env.clone();
|
||||||
// newer aho_corasick versions throw a deprecation warning
|
// newer aho_corasick versions throw a deprecation warning
|
||||||
let rustflags = format!(
|
let rustflags =
|
||||||
"{} --cap-lints warn",
|
format!("{} --cap-lints warn", env.get("RUSTFLAGS").cloned().unwrap_or_default());
|
||||||
env.get("RUSTFLAGS").cloned().unwrap_or_default()
|
|
||||||
);
|
|
||||||
env.insert("RUSTFLAGS".to_string(), rustflags);
|
env.insert("RUSTFLAGS".to_string(), rustflags);
|
||||||
|
|
||||||
let path = Path::new(crate::BUILD_DIR).join("rand");
|
let path = Path::new(crate::BUILD_DIR).join("rand");
|
||||||
|
@ -788,18 +731,11 @@ fn extended_regex_example_tests(env: &Env, args: &TestArg) -> Result<(), String>
|
||||||
println!("[TEST] rust-lang/regex example shootout-regex-dna");
|
println!("[TEST] rust-lang/regex example shootout-regex-dna");
|
||||||
let mut env = env.clone();
|
let mut env = env.clone();
|
||||||
// newer aho_corasick versions throw a deprecation warning
|
// newer aho_corasick versions throw a deprecation warning
|
||||||
let rustflags = format!(
|
let rustflags =
|
||||||
"{} --cap-lints warn",
|
format!("{} --cap-lints warn", env.get("RUSTFLAGS").cloned().unwrap_or_default());
|
||||||
env.get("RUSTFLAGS").cloned().unwrap_or_default()
|
|
||||||
);
|
|
||||||
env.insert("RUSTFLAGS".to_string(), rustflags);
|
env.insert("RUSTFLAGS".to_string(), rustflags);
|
||||||
// Make sure `[codegen mono items] start` doesn't poison the diff
|
// Make sure `[codegen mono items] start` doesn't poison the diff
|
||||||
run_cargo_command(
|
run_cargo_command(&[&"build", &"--example", &"shootout-regex-dna"], Some(&path), &env, args)?;
|
||||||
&[&"build", &"--example", &"shootout-regex-dna"],
|
|
||||||
Some(&path),
|
|
||||||
&env,
|
|
||||||
args,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
run_cargo_command_with_callback(
|
run_cargo_command_with_callback(
|
||||||
&[&"run", &"--example", &"shootout-regex-dna"],
|
&[&"run", &"--example", &"shootout-regex-dna"],
|
||||||
|
@ -810,10 +746,8 @@ fn extended_regex_example_tests(env: &Env, args: &TestArg) -> Result<(), String>
|
||||||
// FIXME: rewrite this with `child.stdin.write_all()` because
|
// FIXME: rewrite this with `child.stdin.write_all()` because
|
||||||
// `examples/regexdna-input.txt` is very small.
|
// `examples/regexdna-input.txt` is very small.
|
||||||
let mut command: Vec<&dyn AsRef<OsStr>> = vec![&"bash", &"-c"];
|
let mut command: Vec<&dyn AsRef<OsStr>> = vec![&"bash", &"-c"];
|
||||||
let cargo_args = cargo_command
|
let cargo_args =
|
||||||
.iter()
|
cargo_command.iter().map(|s| s.as_ref().to_str().unwrap()).collect::<Vec<_>>();
|
||||||
.map(|s| s.as_ref().to_str().unwrap())
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
let bash_command = format!(
|
let bash_command = format!(
|
||||||
"cat examples/regexdna-input.txt | {} | grep -v 'Spawned thread' > res.txt",
|
"cat examples/regexdna-input.txt | {} | grep -v 'Spawned thread' > res.txt",
|
||||||
cargo_args.join(" "),
|
cargo_args.join(" "),
|
||||||
|
@ -841,10 +775,8 @@ fn extended_regex_tests(env: &Env, args: &TestArg) -> Result<(), String> {
|
||||||
println!("[TEST] rust-lang/regex tests");
|
println!("[TEST] rust-lang/regex tests");
|
||||||
let mut env = env.clone();
|
let mut env = env.clone();
|
||||||
// newer aho_corasick versions throw a deprecation warning
|
// newer aho_corasick versions throw a deprecation warning
|
||||||
let rustflags = format!(
|
let rustflags =
|
||||||
"{} --cap-lints warn",
|
format!("{} --cap-lints warn", env.get("RUSTFLAGS").cloned().unwrap_or_default());
|
||||||
env.get("RUSTFLAGS").cloned().unwrap_or_default()
|
|
||||||
);
|
|
||||||
env.insert("RUSTFLAGS".to_string(), rustflags);
|
env.insert("RUSTFLAGS".to_string(), rustflags);
|
||||||
let path = Path::new(crate::BUILD_DIR).join("regex");
|
let path = Path::new(crate::BUILD_DIR).join("regex");
|
||||||
run_cargo_command(
|
run_cargo_command(
|
||||||
|
@ -884,7 +816,7 @@ fn extended_sysroot_tests(env: &Env, args: &TestArg) -> Result<(), String> {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn should_not_remove_test(file: &str) -> bool {
|
fn valid_ui_error_pattern_test(file: &str) -> bool {
|
||||||
// contains //~ERROR, but shouldn't be removed
|
// contains //~ERROR, but shouldn't be removed
|
||||||
[
|
[
|
||||||
"issues/auxiliary/issue-3136-a.rs",
|
"issues/auxiliary/issue-3136-a.rs",
|
||||||
|
@ -899,7 +831,8 @@ fn should_not_remove_test(file: &str) -> bool {
|
||||||
.any(|to_ignore| file.ends_with(to_ignore))
|
.any(|to_ignore| file.ends_with(to_ignore))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn should_remove_test(file_path: &Path) -> Result<bool, String> {
|
#[rustfmt::skip]
|
||||||
|
fn contains_ui_error_patterns(file_path: &Path) -> Result<bool, String> {
|
||||||
// Tests generating errors.
|
// Tests generating errors.
|
||||||
let file = File::open(file_path)
|
let file = File::open(file_path)
|
||||||
.map_err(|error| format!("Failed to read `{}`: {:?}", file_path.display(), error))?;
|
.map_err(|error| format!("Failed to read `{}`: {:?}", file_path.display(), error))?;
|
||||||
|
@ -916,8 +849,8 @@ fn should_remove_test(file_path: &Path) -> Result<bool, String> {
|
||||||
"//~",
|
"//~",
|
||||||
"thread",
|
"thread",
|
||||||
]
|
]
|
||||||
.iter()
|
.iter()
|
||||||
.any(|check| line.contains(check))
|
.any(|check| line.contains(check))
|
||||||
{
|
{
|
||||||
return Ok(true);
|
return Ok(true);
|
||||||
}
|
}
|
||||||
|
@ -925,17 +858,27 @@ fn should_remove_test(file_path: &Path) -> Result<bool, String> {
|
||||||
return Ok(true);
|
return Ok(true);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if file_path
|
if file_path.display().to_string().contains("ambiguous-4-extern.rs") {
|
||||||
.display()
|
|
||||||
.to_string()
|
|
||||||
.contains("ambiguous-4-extern.rs")
|
|
||||||
{
|
|
||||||
eprintln!("nothing found for {file_path:?}");
|
eprintln!("nothing found for {file_path:?}");
|
||||||
}
|
}
|
||||||
Ok(false)
|
Ok(false)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn test_rustc_inner<F>(env: &Env, args: &TestArg, prepare_files_callback: F) -> Result<(), String>
|
// # Parameters
|
||||||
|
//
|
||||||
|
// * `env`: An environment variable that provides context for the function.
|
||||||
|
// * `args`: The arguments passed to the test. This could include things like the flags, config etc.
|
||||||
|
// * `prepare_files_callback`: A callback function that prepares the files needed for the test. Its used to remove/retain tests giving Error to run various rust test suits.
|
||||||
|
// * `run_error_pattern_test`: A boolean that determines whether to run only error pattern tests.
|
||||||
|
// * `test_type`: A string that indicates the type of the test being run.
|
||||||
|
//
|
||||||
|
fn test_rustc_inner<F>(
|
||||||
|
env: &Env,
|
||||||
|
args: &TestArg,
|
||||||
|
prepare_files_callback: F,
|
||||||
|
run_error_pattern_test: bool,
|
||||||
|
test_type: &str,
|
||||||
|
) -> Result<(), String>
|
||||||
where
|
where
|
||||||
F: Fn(&Path) -> Result<bool, String>,
|
F: Fn(&Path) -> Result<bool, String>,
|
||||||
{
|
{
|
||||||
|
@ -944,139 +887,138 @@ where
|
||||||
let mut env = env.clone();
|
let mut env = env.clone();
|
||||||
let rust_path = setup_rustc(&mut env, args)?;
|
let rust_path = setup_rustc(&mut env, args)?;
|
||||||
|
|
||||||
walk_dir(
|
|
||||||
rust_path.join("tests/ui"),
|
|
||||||
|dir| {
|
|
||||||
let dir_name = dir.file_name().and_then(|name| name.to_str()).unwrap_or("");
|
|
||||||
if [
|
|
||||||
"abi",
|
|
||||||
"extern",
|
|
||||||
"unsized-locals",
|
|
||||||
"proc-macro",
|
|
||||||
"threads-sendsync",
|
|
||||||
"borrowck",
|
|
||||||
"test-attrs",
|
|
||||||
]
|
|
||||||
.iter()
|
|
||||||
.any(|name| *name == dir_name)
|
|
||||||
{
|
|
||||||
std::fs::remove_dir_all(dir).map_err(|error| {
|
|
||||||
format!("Failed to remove folder `{}`: {:?}", dir.display(), error)
|
|
||||||
})?;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
},
|
|
||||||
|_| Ok(()),
|
|
||||||
)?;
|
|
||||||
|
|
||||||
// These two functions are used to remove files that are known to not be working currently
|
|
||||||
// with the GCC backend to reduce noise.
|
|
||||||
fn dir_handling(dir: &Path) -> Result<(), String> {
|
|
||||||
if dir
|
|
||||||
.file_name()
|
|
||||||
.map(|name| name == "auxiliary")
|
|
||||||
.unwrap_or(true)
|
|
||||||
{
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
walk_dir(dir, dir_handling, file_handling)
|
|
||||||
}
|
|
||||||
fn file_handling(file_path: &Path) -> Result<(), String> {
|
|
||||||
if !file_path
|
|
||||||
.extension()
|
|
||||||
.map(|extension| extension == "rs")
|
|
||||||
.unwrap_or(false)
|
|
||||||
{
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
let path_str = file_path.display().to_string().replace("\\", "/");
|
|
||||||
if should_not_remove_test(&path_str) {
|
|
||||||
return Ok(());
|
|
||||||
} else if should_remove_test(file_path)? {
|
|
||||||
return remove_file(&file_path);
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
remove_file(&rust_path.join("tests/ui/consts/const_cmp_type_id.rs"))?;
|
|
||||||
remove_file(&rust_path.join("tests/ui/consts/issue-73976-monomorphic.rs"))?;
|
|
||||||
// this test is oom-killed in the CI.
|
|
||||||
remove_file(&rust_path.join("tests/ui/consts/issue-miri-1910.rs"))?;
|
|
||||||
// Tests generating errors.
|
|
||||||
remove_file(&rust_path.join("tests/ui/consts/issue-94675.rs"))?;
|
|
||||||
remove_file(&rust_path.join("tests/ui/mir/mir_heavy_promoted.rs"))?;
|
|
||||||
remove_file(&rust_path.join("tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop-fail.rs"))?;
|
|
||||||
remove_file(&rust_path.join("tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop.rs"))?;
|
|
||||||
|
|
||||||
walk_dir(rust_path.join("tests/ui"), dir_handling, file_handling)?;
|
|
||||||
|
|
||||||
if !prepare_files_callback(&rust_path)? {
|
if !prepare_files_callback(&rust_path)? {
|
||||||
// FIXME: create a function "display_if_not_quiet" or something along the line.
|
// FIXME: create a function "display_if_not_quiet" or something along the line.
|
||||||
println!("Keeping all UI tests");
|
println!("Keeping all {} tests", test_type);
|
||||||
}
|
}
|
||||||
|
|
||||||
let nb_parts = args.nb_parts.unwrap_or(0);
|
if test_type == "ui" {
|
||||||
if nb_parts > 0 {
|
if run_error_pattern_test {
|
||||||
let current_part = args.current_part.unwrap();
|
// After we removed the error tests that are known to panic with rustc_codegen_gcc, we now remove the passing tests since this runs the error tests.
|
||||||
// FIXME: create a function "display_if_not_quiet" or something along the line.
|
walk_dir(
|
||||||
println!(
|
rust_path.join("tests/ui"),
|
||||||
"Splitting ui_test into {} parts (and running part {})",
|
&mut |_dir| Ok(()),
|
||||||
nb_parts, current_part
|
&mut |file_path| {
|
||||||
);
|
if contains_ui_error_patterns(file_path)? {
|
||||||
let out = String::from_utf8(
|
Ok(())
|
||||||
run_command(
|
} else {
|
||||||
&[
|
remove_file(file_path).map_err(|e| e.to_string())
|
||||||
&"find",
|
}
|
||||||
&"tests/ui",
|
},
|
||||||
&"-type",
|
true,
|
||||||
&"f",
|
)?;
|
||||||
&"-name",
|
} else {
|
||||||
&"*.rs",
|
walk_dir(
|
||||||
&"-not",
|
rust_path.join("tests/ui"),
|
||||||
&"-path",
|
&mut |dir| {
|
||||||
&"*/auxiliary/*",
|
let dir_name = dir.file_name().and_then(|name| name.to_str()).unwrap_or("");
|
||||||
],
|
if [
|
||||||
Some(&rust_path),
|
"abi",
|
||||||
)?
|
"extern",
|
||||||
.stdout,
|
"unsized-locals",
|
||||||
)
|
"proc-macro",
|
||||||
.map_err(|error| format!("Failed to retrieve output of find command: {:?}", error))?;
|
"threads-sendsync",
|
||||||
let mut files = out
|
"borrowck",
|
||||||
.split('\n')
|
"test-attrs",
|
||||||
.map(|line| line.trim())
|
]
|
||||||
.filter(|line| !line.is_empty())
|
.iter()
|
||||||
.collect::<Vec<_>>();
|
.any(|name| *name == dir_name)
|
||||||
// To ensure it'll be always the same sub files, we sort the content.
|
{
|
||||||
files.sort();
|
std::fs::remove_dir_all(dir).map_err(|error| {
|
||||||
// We increment the number of tests by one because if this is an odd number, we would skip
|
format!("Failed to remove folder `{}`: {:?}", dir.display(), error)
|
||||||
// one test.
|
})?;
|
||||||
let count = files.len() / nb_parts + 1;
|
}
|
||||||
let start = current_part * count;
|
Ok(())
|
||||||
// We remove the files we don't want to test.
|
},
|
||||||
for path in files.iter().skip(start).take(count) {
|
&mut |_| Ok(()),
|
||||||
remove_file(&rust_path.join(path))?;
|
false,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
// These two functions are used to remove files that are known to not be working currently
|
||||||
|
// with the GCC backend to reduce noise.
|
||||||
|
fn dir_handling(dir: &Path) -> Result<(), String> {
|
||||||
|
if dir.file_name().map(|name| name == "auxiliary").unwrap_or(true) {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
walk_dir(dir, &mut dir_handling, &mut file_handling, false)
|
||||||
|
}
|
||||||
|
fn file_handling(file_path: &Path) -> Result<(), String> {
|
||||||
|
if !file_path.extension().map(|extension| extension == "rs").unwrap_or(false) {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
let path_str = file_path.display().to_string().replace("\\", "/");
|
||||||
|
if valid_ui_error_pattern_test(&path_str) {
|
||||||
|
return Ok(());
|
||||||
|
} else if contains_ui_error_patterns(file_path)? {
|
||||||
|
return remove_file(&file_path);
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
walk_dir(rust_path.join("tests/ui"), &mut dir_handling, &mut file_handling, false)?;
|
||||||
|
}
|
||||||
|
let nb_parts = args.nb_parts.unwrap_or(0);
|
||||||
|
if nb_parts > 0 {
|
||||||
|
let current_part = args.current_part.unwrap();
|
||||||
|
// FIXME: create a function "display_if_not_quiet" or something along the line.
|
||||||
|
println!(
|
||||||
|
"Splitting ui_test into {} parts (and running part {})",
|
||||||
|
nb_parts, current_part
|
||||||
|
);
|
||||||
|
let out = String::from_utf8(
|
||||||
|
run_command(
|
||||||
|
&[
|
||||||
|
&"find",
|
||||||
|
&"tests/ui",
|
||||||
|
&"-type",
|
||||||
|
&"f",
|
||||||
|
&"-name",
|
||||||
|
&"*.rs",
|
||||||
|
&"-not",
|
||||||
|
&"-path",
|
||||||
|
&"*/auxiliary/*",
|
||||||
|
],
|
||||||
|
Some(&rust_path),
|
||||||
|
)?
|
||||||
|
.stdout,
|
||||||
|
)
|
||||||
|
.map_err(|error| format!("Failed to retrieve output of find command: {:?}", error))?;
|
||||||
|
let mut files = out
|
||||||
|
.split('\n')
|
||||||
|
.map(|line| line.trim())
|
||||||
|
.filter(|line| !line.is_empty())
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
// To ensure it'll be always the same sub files, we sort the content.
|
||||||
|
files.sort();
|
||||||
|
// We increment the number of tests by one because if this is an odd number, we would skip
|
||||||
|
// one test.
|
||||||
|
let count = files.len() / nb_parts + 1;
|
||||||
|
// We remove the files we don't want to test.
|
||||||
|
let start = current_part * count;
|
||||||
|
for path in files.iter().skip(start).take(count) {
|
||||||
|
remove_file(&rust_path.join(path))?;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// FIXME: create a function "display_if_not_quiet" or something along the line.
|
// FIXME: create a function "display_if_not_quiet" or something along the line.
|
||||||
println!("[TEST] rustc test suite");
|
println!("[TEST] rustc {} test suite", test_type);
|
||||||
env.insert("COMPILETEST_FORCE_STAGE0".to_string(), "1".to_string());
|
env.insert("COMPILETEST_FORCE_STAGE0".to_string(), "1".to_string());
|
||||||
|
|
||||||
let extra = if args.is_using_gcc_master_branch() {
|
let extra =
|
||||||
""
|
if args.is_using_gcc_master_branch() { "" } else { " -Csymbol-mangling-version=v0" };
|
||||||
} else {
|
|
||||||
" -Csymbol-mangling-version=v0"
|
|
||||||
};
|
|
||||||
|
|
||||||
let rustc_args = format!(
|
let rustc_args = format!(
|
||||||
"{} -Zcodegen-backend={} --sysroot {}{}",
|
"{test_flags} -Zcodegen-backend={backend} --sysroot {sysroot}{extra}",
|
||||||
env.get("TEST_FLAGS").unwrap_or(&String::new()),
|
test_flags = env.get("TEST_FLAGS").unwrap_or(&String::new()),
|
||||||
args.config_info.cg_backend_path,
|
backend = args.config_info.cg_backend_path,
|
||||||
args.config_info.sysroot_path,
|
sysroot = args.config_info.sysroot_path,
|
||||||
extra,
|
extra = extra,
|
||||||
);
|
);
|
||||||
|
|
||||||
env.get_mut("RUSTFLAGS").unwrap().clear();
|
env.get_mut("RUSTFLAGS").unwrap().clear();
|
||||||
|
|
||||||
run_command_with_output_and_env(
|
run_command_with_output_and_env(
|
||||||
&[
|
&[
|
||||||
&"./x.py",
|
&"./x.py",
|
||||||
|
@ -1085,7 +1027,7 @@ where
|
||||||
&"always",
|
&"always",
|
||||||
&"--stage",
|
&"--stage",
|
||||||
&"0",
|
&"0",
|
||||||
&"tests/ui",
|
&format!("tests/{}", test_type),
|
||||||
&"--rustc-args",
|
&"--rustc-args",
|
||||||
&rustc_args,
|
&rustc_args,
|
||||||
],
|
],
|
||||||
|
@ -1096,68 +1038,162 @@ where
|
||||||
}
|
}
|
||||||
|
|
||||||
fn test_rustc(env: &Env, args: &TestArg) -> Result<(), String> {
|
fn test_rustc(env: &Env, args: &TestArg) -> Result<(), String> {
|
||||||
test_rustc_inner(env, args, |_| Ok(false))
|
//test_rustc_inner(env, args, |_| Ok(false), false, "run-make")?;
|
||||||
|
test_rustc_inner(env, args, |_| Ok(false), false, "ui")
|
||||||
}
|
}
|
||||||
|
|
||||||
fn test_failing_rustc(env: &Env, args: &TestArg) -> Result<(), String> {
|
fn test_failing_rustc(env: &Env, args: &TestArg) -> Result<(), String> {
|
||||||
test_rustc_inner(env, args, |rust_path| {
|
let result1 = Ok(());
|
||||||
// Removing all tests.
|
/*test_rustc_inner(
|
||||||
run_command(
|
env,
|
||||||
&[
|
args,
|
||||||
&"find",
|
retain_files_callback("tests/failing-run-make-tests.txt", "run-make"),
|
||||||
&"tests/ui",
|
false,
|
||||||
&"-type",
|
"run-make",
|
||||||
&"f",
|
)*/
|
||||||
&"-name",
|
|
||||||
&"*.rs",
|
let result2 = test_rustc_inner(
|
||||||
&"-not",
|
env,
|
||||||
&"-path",
|
args,
|
||||||
&"*/auxiliary/*",
|
retain_files_callback("tests/failing-ui-tests.txt", "ui"),
|
||||||
&"-delete",
|
false,
|
||||||
],
|
"ui",
|
||||||
Some(rust_path),
|
);
|
||||||
)?;
|
|
||||||
|
result1.and(result2)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn test_successful_rustc(env: &Env, args: &TestArg) -> Result<(), String> {
|
||||||
|
test_rustc_inner(
|
||||||
|
env,
|
||||||
|
args,
|
||||||
|
remove_files_callback("tests/failing-ui-tests.txt", "ui"),
|
||||||
|
false,
|
||||||
|
"ui",
|
||||||
|
)?;
|
||||||
|
Ok(())
|
||||||
|
/*test_rustc_inner(
|
||||||
|
env,
|
||||||
|
args,
|
||||||
|
remove_files_callback("tests/failing-run-make-tests.txt", "run-make"),
|
||||||
|
false,
|
||||||
|
"run-make",
|
||||||
|
)*/
|
||||||
|
}
|
||||||
|
|
||||||
|
fn test_failing_ui_pattern_tests(env: &Env, args: &TestArg) -> Result<(), String> {
|
||||||
|
test_rustc_inner(
|
||||||
|
env,
|
||||||
|
args,
|
||||||
|
remove_files_callback("tests/failing-ice-tests.txt", "ui"),
|
||||||
|
true,
|
||||||
|
"ui",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn retain_files_callback<'a>(
|
||||||
|
file_path: &'a str,
|
||||||
|
test_type: &'a str,
|
||||||
|
) -> impl Fn(&Path) -> Result<bool, String> + 'a {
|
||||||
|
move |rust_path| {
|
||||||
|
let files = std::fs::read_to_string(file_path).unwrap_or_default();
|
||||||
|
let first_file_name = files.lines().next().unwrap_or("");
|
||||||
|
// If the first line ends with a `/`, we treat all lines in the file as a directory.
|
||||||
|
if first_file_name.ends_with('/') {
|
||||||
|
// Treat as directory
|
||||||
|
// Removing all tests.
|
||||||
|
run_command(
|
||||||
|
&[
|
||||||
|
&"find",
|
||||||
|
&format!("tests/{}", test_type),
|
||||||
|
&"-mindepth",
|
||||||
|
&"1",
|
||||||
|
&"-type",
|
||||||
|
&"d",
|
||||||
|
&"-exec",
|
||||||
|
&"rm",
|
||||||
|
&"-rf",
|
||||||
|
&"{}",
|
||||||
|
&"+",
|
||||||
|
],
|
||||||
|
Some(rust_path),
|
||||||
|
)?;
|
||||||
|
} else {
|
||||||
|
// Treat as file
|
||||||
|
// Removing all tests.
|
||||||
|
run_command(
|
||||||
|
&[
|
||||||
|
&"find",
|
||||||
|
&format!("tests/{}", test_type),
|
||||||
|
&"-type",
|
||||||
|
&"f",
|
||||||
|
&"-name",
|
||||||
|
&"*.rs",
|
||||||
|
&"-not",
|
||||||
|
&"-path",
|
||||||
|
&"*/auxiliary/*",
|
||||||
|
&"-delete",
|
||||||
|
],
|
||||||
|
Some(rust_path),
|
||||||
|
)?;
|
||||||
|
}
|
||||||
|
|
||||||
// Putting back only the failing ones.
|
// Putting back only the failing ones.
|
||||||
let path = "tests/failing-ui-tests.txt";
|
if let Ok(files) = std::fs::read_to_string(&file_path) {
|
||||||
if let Ok(files) = std::fs::read_to_string(path) {
|
for file in files.split('\n').map(|line| line.trim()).filter(|line| !line.is_empty()) {
|
||||||
for file in files
|
|
||||||
.split('\n')
|
|
||||||
.map(|line| line.trim())
|
|
||||||
.filter(|line| !line.is_empty())
|
|
||||||
{
|
|
||||||
run_command(&[&"git", &"checkout", &"--", &file], Some(&rust_path))?;
|
run_command(&[&"git", &"checkout", &"--", &file], Some(&rust_path))?;
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
println!(
|
println!(
|
||||||
"Failed to read `{}`, not putting back failing ui tests",
|
"Failed to read `{}`, not putting back failing {} tests",
|
||||||
path
|
file_path, test_type
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(true)
|
Ok(true)
|
||||||
})
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn test_successful_rustc(env: &Env, args: &TestArg) -> Result<(), String> {
|
fn remove_files_callback<'a>(
|
||||||
test_rustc_inner(env, args, |rust_path| {
|
file_path: &'a str,
|
||||||
// Removing the failing tests.
|
test_type: &'a str,
|
||||||
let path = "tests/failing-ui-tests.txt";
|
) -> impl Fn(&Path) -> Result<bool, String> + 'a {
|
||||||
if let Ok(files) = std::fs::read_to_string(path) {
|
move |rust_path| {
|
||||||
for file in files
|
let files = std::fs::read_to_string(file_path).unwrap_or_default();
|
||||||
.split('\n')
|
let first_file_name = files.lines().next().unwrap_or("");
|
||||||
.map(|line| line.trim())
|
// If the first line ends with a `/`, we treat all lines in the file as a directory.
|
||||||
.filter(|line| !line.is_empty())
|
if first_file_name.ends_with('/') {
|
||||||
{
|
// Removing the failing tests.
|
||||||
let path = rust_path.join(file);
|
if let Ok(files) = std::fs::read_to_string(file_path) {
|
||||||
remove_file(&path)?;
|
for file in
|
||||||
|
files.split('\n').map(|line| line.trim()).filter(|line| !line.is_empty())
|
||||||
|
{
|
||||||
|
let path = rust_path.join(file);
|
||||||
|
if let Err(e) = std::fs::remove_dir_all(&path) {
|
||||||
|
println!("Failed to remove directory `{}`: {}", path.display(), e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
println!(
|
||||||
|
"Failed to read `{}`, not putting back failing {} tests",
|
||||||
|
file_path, test_type
|
||||||
|
);
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
println!(
|
// Removing the failing tests.
|
||||||
"Failed to read `{}`, not putting back failing ui tests",
|
if let Ok(files) = std::fs::read_to_string(file_path) {
|
||||||
path
|
for file in
|
||||||
);
|
files.split('\n').map(|line| line.trim()).filter(|line| !line.is_empty())
|
||||||
|
{
|
||||||
|
let path = rust_path.join(file);
|
||||||
|
remove_file(&path)?;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
println!("Failed to read `{}`, not putting back failing ui tests", file_path);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
Ok(true)
|
Ok(true)
|
||||||
})
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn run_all(env: &Env, args: &TestArg) -> Result<(), String> {
|
fn run_all(env: &Env, args: &TestArg) -> Result<(), String> {
|
||||||
|
@@ -1181,14 +1217,8 @@ pub fn run() -> Result<(), String> {

     if !args.use_system_gcc {
         args.config_info.setup_gcc_path()?;
-        env.insert(
-            "LIBRARY_PATH".to_string(),
-            args.config_info.gcc_path.clone(),
-        );
-        env.insert(
-            "LD_LIBRARY_PATH".to_string(),
-            args.config_info.gcc_path.clone(),
-        );
+        env.insert("LIBRARY_PATH".to_string(), args.config_info.gcc_path.clone());
+        env.insert("LD_LIBRARY_PATH".to_string(), args.config_info.gcc_path.clone());
     }

     build_if_no_backend(&env, &args)?;

@@ -1,10 +1,42 @@
 use std::collections::HashMap;
+#[cfg(unix)]
+use std::ffi::c_int;
 use std::ffi::OsStr;
 use std::fmt::Debug;
 use std::fs;
+#[cfg(unix)]
+use std::os::unix::process::ExitStatusExt;
 use std::path::{Path, PathBuf};
 use std::process::{Command, ExitStatus, Output};

+#[cfg(unix)]
+extern "C" {
+    fn raise(signal: c_int) -> c_int;
+}
+
+fn exec_command(
+    input: &[&dyn AsRef<OsStr>],
+    cwd: Option<&Path>,
+    env: Option<&HashMap<String, String>>,
+) -> Result<ExitStatus, String> {
+    let status = get_command_inner(input, cwd, env)
+        .spawn()
+        .map_err(|e| command_error(input, &cwd, e))?
+        .wait()
+        .map_err(|e| command_error(input, &cwd, e))?;
+    #[cfg(unix)]
+    {
+        if let Some(signal) = status.signal() {
+            unsafe {
+                raise(signal as _);
+            }
+            // In case the signal didn't kill the current process.
+            return Err(command_error(input, &cwd, format!("Process received signal {}", signal)));
+        }
+    }
+    Ok(status)
+}
+
 fn get_command_inner(
     input: &[&dyn AsRef<OsStr>],
     cwd: Option<&Path>,
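The notable part of the new `exec_command` helper is the Unix-only signal forwarding: when a child process dies from a signal, the parent re-raises that signal on itself so the whole run stops the same way. A minimal standalone sketch of that pattern (the spawned command is only an illustration):

```rust
use std::process::Command;

#[cfg(unix)]
fn forward_child_signal(status: std::process::ExitStatus) {
    use std::ffi::c_int;
    use std::os::unix::process::ExitStatusExt;

    extern "C" {
        fn raise(signal: c_int) -> c_int;
    }
    // If the child was terminated by a signal (e.g. SIGINT from Ctrl-C), re-raise it in the
    // parent so the whole program behaves as if it had received the signal itself.
    if let Some(signal) = status.signal() {
        unsafe {
            raise(signal as c_int);
        }
    }
}

fn main() -> std::io::Result<()> {
    // Illustrative child command; replace with whatever needs to be spawned.
    let status = Command::new("sh").args(["-c", "exit 0"]).status()?;
    #[cfg(unix)]
    forward_child_signal(status);
    println!("child exited with {status}");
    Ok(())
}
```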
@@ -37,13 +69,8 @@ fn check_exit_status(
     }
     let mut error = format!(
         "Command `{}`{} exited with status {:?}",
-        input
-            .iter()
-            .map(|s| s.as_ref().to_str().unwrap())
-            .collect::<Vec<_>>()
-            .join(" "),
-        cwd.map(|cwd| format!(" (running in folder `{}`)", cwd.display()))
-            .unwrap_or_default(),
+        input.iter().map(|s| s.as_ref().to_str().unwrap()).collect::<Vec<_>>().join(" "),
+        cwd.map(|cwd| format!(" (running in folder `{}`)", cwd.display())).unwrap_or_default(),
         exit_status.code()
     );
     let input = input.iter().map(|i| i.as_ref()).collect::<Vec<&OsStr>>();
@@ -68,11 +95,7 @@ fn check_exit_status(
 fn command_error<D: Debug>(input: &[&dyn AsRef<OsStr>], cwd: &Option<&Path>, error: D) -> String {
     format!(
         "Command `{}`{} failed to run: {error:?}",
-        input
-            .iter()
-            .map(|s| s.as_ref().to_str().unwrap())
-            .collect::<Vec<_>>()
-            .join(" "),
+        input.iter().map(|s| s.as_ref().to_str().unwrap()).collect::<Vec<_>>().join(" "),
         cwd.as_ref()
             .map(|cwd| format!(" (running in folder `{}`)", cwd.display(),))
             .unwrap_or_default(),
@@ -88,9 +111,8 @@ pub fn run_command_with_env(
     cwd: Option<&Path>,
     env: Option<&HashMap<String, String>>,
 ) -> Result<Output, String> {
-    let output = get_command_inner(input, cwd, env)
-        .output()
-        .map_err(|e| command_error(input, &cwd, e))?;
+    let output =
+        get_command_inner(input, cwd, env).output().map_err(|e| command_error(input, &cwd, e))?;
     check_exit_status(input, cwd, output.status, Some(&output), true)?;
     Ok(output)
 }
@@ -99,11 +121,7 @@ pub fn run_command_with_output(
     input: &[&dyn AsRef<OsStr>],
     cwd: Option<&Path>,
 ) -> Result<(), String> {
-    let exit_status = get_command_inner(input, cwd, None)
-        .spawn()
-        .map_err(|e| command_error(input, &cwd, e))?
-        .wait()
-        .map_err(|e| command_error(input, &cwd, e))?;
+    let exit_status = exec_command(input, cwd, None)?;
     check_exit_status(input, cwd, exit_status, None, true)?;
     Ok(())
 }
@@ -113,11 +131,7 @@ pub fn run_command_with_output_and_env(
     cwd: Option<&Path>,
     env: Option<&HashMap<String, String>>,
 ) -> Result<(), String> {
-    let exit_status = get_command_inner(input, cwd, env)
-        .spawn()
-        .map_err(|e| command_error(input, &cwd, e))?
-        .wait()
-        .map_err(|e| command_error(input, &cwd, e))?;
+    let exit_status = exec_command(input, cwd, env)?;
     check_exit_status(input, cwd, exit_status, None, true)?;
     Ok(())
 }
@@ -127,11 +141,7 @@ pub fn run_command_with_output_and_env_no_err(
     cwd: Option<&Path>,
     env: Option<&HashMap<String, String>>,
 ) -> Result<(), String> {
-    let exit_status = get_command_inner(input, cwd, env)
-        .spawn()
-        .map_err(|e| command_error(input, &cwd, e))?
-        .wait()
-        .map_err(|e| command_error(input, &cwd, e))?;
+    let exit_status = exec_command(input, cwd, env)?;
     check_exit_status(input, cwd, exit_status, None, false)?;
     Ok(())
 }
|
@ -164,10 +174,7 @@ pub fn cargo_install(to_install: &str) -> Result<(), String> {
|
||||||
|
|
||||||
pub fn get_os_name() -> Result<String, String> {
|
pub fn get_os_name() -> Result<String, String> {
|
||||||
let output = run_command(&[&"uname"], None)?;
|
let output = run_command(&[&"uname"], None)?;
|
||||||
let name = std::str::from_utf8(&output.stdout)
|
let name = std::str::from_utf8(&output.stdout).unwrap_or("").trim().to_string();
|
||||||
.unwrap_or("")
|
|
||||||
.trim()
|
|
||||||
.to_string();
|
|
||||||
if !name.is_empty() {
|
if !name.is_empty() {
|
||||||
Ok(name)
|
Ok(name)
|
||||||
} else {
|
} else {
|
||||||
|
@@ -274,11 +281,7 @@ fn git_clone_inner(
         command.push(&"1");
     }
     run_command_with_output(&command, None)?;
-    Ok(CloneResult {
-        ran_clone: true,
-        repo_name,
-        repo_dir: dest.display().to_string(),
-    })
+    Ok(CloneResult { ran_clone: true, repo_name, repo_dir: dest.display().to_string() })
 }

 fn get_repo_name(url: &str) -> String {
@@ -307,6 +310,25 @@ pub fn git_clone(
     git_clone_inner(to_clone, dest, shallow_clone, repo_name)
 }

+pub fn create_dir<P: AsRef<Path>>(path: P) -> Result<(), String> {
+    fs::create_dir_all(&path).map_err(|error| {
+        format!("Failed to create directory `{}`: {:?}", path.as_ref().display(), error)
+    })
+}
+
+pub fn copy_file<F: AsRef<Path>, T: AsRef<Path>>(from: F, to: T) -> Result<(), String> {
+    fs::copy(&from, &to)
+        .map_err(|error| {
+            format!(
+                "Failed to copy file `{}` into `{}`: {:?}",
+                from.as_ref().display(),
+                to.as_ref().display(),
+                error
+            )
+        })
+        .map(|_| ())
+}
+
 /// This function differs from `git_clone` in how it handles *where* the repository will be cloned.
 /// In `git_clone`, it is cloned in the provided path. In this function, the path you provide is
 /// the parent folder. So if you pass "a" as folder and try to clone "b.git", it will be cloned into
@@ -318,15 +340,15 @@ pub fn git_clone_root_dir(
 ) -> Result<CloneResult, String> {
     let repo_name = get_repo_name(to_clone);

-    git_clone_inner(
-        to_clone,
-        &dest_parent_dir.join(&repo_name),
-        shallow_clone,
-        repo_name,
-    )
+    git_clone_inner(to_clone, &dest_parent_dir.join(&repo_name), shallow_clone, repo_name)
 }

-pub fn walk_dir<P, D, F>(dir: P, mut dir_cb: D, mut file_cb: F) -> Result<(), String>
+pub fn walk_dir<P, D, F>(
+    dir: P,
+    dir_cb: &mut D,
+    file_cb: &mut F,
+    recursive: bool,
+) -> Result<(), String>
 where
     P: AsRef<Path>,
     D: FnMut(&Path) -> Result<(), String>,
@@ -341,6 +363,9 @@ where
         let entry_path = entry.path();
         if entry_path.is_dir() {
             dir_cb(&entry_path)?;
+            if recursive {
+                walk_dir(entry_path, dir_cb, file_cb, recursive)?; // Recursive call
+            }
         } else {
             file_cb(&entry_path)?;
         }
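A minimal standalone sketch of the callback-based, optionally recursive directory walk that `walk_dir` now implements (names and the error type are illustrative, not the build system's exact API):

```rust
use std::path::Path;

// Sketch of a walk_dir-style helper: one callback for directories, one for files, and an
// explicit `recursive` flag controlling whether sub-directories are descended into.
fn walk<D, F>(dir: &Path, dir_cb: &mut D, file_cb: &mut F, recursive: bool) -> Result<(), String>
where
    D: FnMut(&Path) -> Result<(), String>,
    F: FnMut(&Path) -> Result<(), String>,
{
    let entries = std::fs::read_dir(dir)
        .map_err(|error| format!("Failed to read `{}`: {:?}", dir.display(), error))?;
    for entry in entries {
        let entry = entry.map_err(|error| format!("Invalid entry: {:?}", error))?;
        let path = entry.path();
        if path.is_dir() {
            dir_cb(&path)?;
            if recursive {
                walk(&path, dir_cb, file_cb, recursive)?;
            }
        } else {
            file_cb(&path)?;
        }
    }
    Ok(())
}

fn main() -> Result<(), String> {
    let mut nb_dirs = 0u32;
    let mut nb_files = 0u32;
    let mut count_dir = |_: &Path| -> Result<(), String> { nb_dirs += 1; Ok(()) };
    let mut count_file = |_: &Path| -> Result<(), String> { nb_files += 1; Ok(()) };
    // Top-level only; pass `true` to descend into sub-directories as well.
    walk(Path::new("."), &mut count_dir, &mut count_file, false)?;
    println!("{nb_dirs} directories, {nb_files} files");
    Ok(())
}
```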
@@ -383,11 +408,7 @@ pub fn split_args(args: &str) -> Result<Vec<String>, String> {
                 }
             }
             if !found_end {
-                return Err(format!(
-                    "Didn't find `{}` at the end of `{}`",
-                    end,
-                    &args[start..]
-                ));
+                return Err(format!("Didn't find `{}` at the end of `{}`", end, &args[start..]));
             }
         } else if c == '\\' {
             // We skip the escaped character.
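For context, here is a minimal sketch of the kind of shell-like argument splitting `split_args` performs — double quotes group words and a backslash escapes the next character. This is an illustration of the idea, not the exact implementation:

```rust
// Illustrative splitter: double quotes group words, '\\' escapes the next character.
fn split_args(args: &str) -> Result<Vec<String>, String> {
    let mut words = Vec::new();
    let mut current = String::new();
    let mut chars = args.chars();
    while let Some(c) = chars.next() {
        match c {
            '"' => {
                let mut found_end = false;
                while let Some(c) = chars.next() {
                    if c == '"' {
                        found_end = true;
                        break;
                    }
                    current.push(c);
                }
                if !found_end {
                    return Err(format!("Didn't find `\"` at the end of `{}`", args));
                }
            }
            '\\' => {
                // Keep the escaped character, drop the backslash.
                if let Some(escaped) = chars.next() {
                    current.push(escaped);
                }
            }
            ' ' => {
                if !current.is_empty() {
                    words.push(std::mem::take(&mut current));
                }
            }
            _ => current.push(c),
        }
    }
    if !current.is_empty() {
        words.push(current);
    }
    Ok(words)
}

fn main() {
    // Prints: Ok(["-C", "link-args=-L native=/tmp/some dir", "--edition", "2021"])
    println!("{:?}", split_args(r#"-C "link-args=-L native=/tmp/some dir" --edition 2021"#));
}
```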
@@ -403,11 +424,7 @@ pub fn split_args(args: &str) -> Result<Vec<String>, String> {

 pub fn remove_file<P: AsRef<Path> + ?Sized>(file_path: &P) -> Result<(), String> {
     std::fs::remove_file(file_path).map_err(|error| {
-        format!(
-            "Failed to remove `{}`: {:?}",
-            file_path.as_ref().display(),
-            error
-        )
+        format!("Failed to remove `{}`: {:?}", file_path.as_ref().display(), error)
     })
 }

@@ -427,6 +444,10 @@ pub fn create_symlink<P: AsRef<Path>, Q: AsRef<Path>>(original: P, link: Q) -> R
     })
 }

+pub fn get_sysroot_dir() -> PathBuf {
+    Path::new(crate::BUILD_DIR).join("build_sysroot")
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;

@@ -1,2 +1,2 @@
-gcc-path = "gcc-build/gcc"
-# download-gccjit = true
+#gcc-path = "gcc-build/gcc"
+download-gccjit = true

@@ -1 +0,0 @@
-INPUT(libLLVM.so.18.1-rust-1.78.0-nightly)

@@ -35,6 +35,14 @@ COLLECT_NO_DEMANGLE=1
 * Build the stage2 compiler (`rustup toolchain link debug-current build/x86_64-unknown-linux-gnu/stage2`).
 * Clean and rebuild the codegen with `debug-current` in the file `rust-toolchain`.

+### How to use a custom sysroot source path
+
+If you wish to build a custom sysroot, pass the path of your sysroot source to `--sysroot-source` during the `prepare` step, like so:
+
+```
+./y.sh prepare --sysroot-source /path/to/custom/source
+```
+
 ### How to use [mem-trace](https://github.com/antoyo/mem-trace)

 `rustc` needs to be built without `jemalloc` so that `mem-trace` can overload `malloc` since `jemalloc` is linked statically, so a `LD_PRELOAD`-ed library won't a chance to intercept the calls to `malloc`.
@@ -54,13 +62,13 @@ generate it in [gimple.md](./doc/gimple.md).

 * Run `./y.sh prepare --cross` so that the sysroot is patched for the cross-compiling case.
 * Set the path to the cross-compiling libgccjit in `gcc-path` (in `config.toml`).
-* Make sure you have the linker for your target (for instance `m68k-unknown-linux-gnu-gcc`) in your `$PATH`. Currently, the linker name is hardcoded as being `$TARGET-gcc`. Specify the target when building the sysroot: `./y.sh build --target-triple m68k-unknown-linux-gnu`.
+* Make sure you have the linker for your target (for instance `m68k-unknown-linux-gnu-gcc`) in your `$PATH`. Currently, the linker name is hardcoded as being `$TARGET-gcc`. Specify the target when building the sysroot: `./y.sh build --sysroot --target-triple m68k-unknown-linux-gnu`.
 * Build your project by specifying the target: `OVERWRITE_TARGET_TRIPLE=m68k-unknown-linux-gnu ../y.sh cargo build --target m68k-unknown-linux-gnu`.

 If the target is not yet supported by the Rust compiler, create a [target specification file](https://docs.rust-embedded.org/embedonomicon/custom-target.html) (note that the `arch` specified in this file must be supported by the rust compiler).
 Then, you can use it the following way:

-* Add the target specification file using `--target` as an **absolute** path to build the sysroot: `./y.sh build --target-triple m68k-unknown-linux-gnu --target $(pwd)/m68k-unknown-linux-gnu.json`
+* Add the target specification file using `--target` as an **absolute** path to build the sysroot: `./y.sh build --sysroot --target-triple m68k-unknown-linux-gnu --target $(pwd)/m68k-unknown-linux-gnu.json`
 * Build your project by specifying the target specification file: `OVERWRITE_TARGET_TRIPLE=m68k-unknown-linux-gnu ../y.sh cargo build --target path/to/m68k-unknown-linux-gnu.json`.

 If you get the following error:

@@ -153,10 +153,9 @@ fn array_as_slice(arr: &[u8; 3]) -> &[u8] {
     arr
 }

-// FIXME: fix the intrinsic implementation to work with the new ->u32 signature
-// unsafe fn use_ctlz_nonzero(a: u16) -> u32 {
-//     intrinsics::ctlz_nonzero(a)
-// }
+unsafe fn use_ctlz_nonzero(a: u16) -> u32 {
+    intrinsics::ctlz_nonzero(a)
+}

 fn ptr_as_usize(ptr: *const u8) -> usize {
     ptr as usize
@@ -1 +1 @@
-b6f163f52
+341be3b7d7ac6976cfed8ed59da3573c040d0776

@@ -19,7 +19,7 @@ index 0000000..4c63700
 +members = [
 +  "crates/core_arch",
 +  "crates/std_detect",
-+  "crates/stdarch-gen",
++  "crates/stdarch-gen-arm",
 +  #"examples/"
 +]
 +exclude = [

@@ -39,4 +39,4 @@ index 42a26ae..5ac1042 100644
 +#![cfg(test)]
  #![feature(alloc_layout_extra)]
  #![feature(array_chunks)]
- #![feature(array_windows)]
+ #![feature(array_ptr_get)]

@@ -1,4 +1,4 @@
-From a5663265f797a43c502915c356fe7899c16cee92 Mon Sep 17 00:00:00 2001
+From 124a11ce086952a5794d5cfbaa45175809497b81 Mon Sep 17 00:00:00 2001
 From: None <none@example.com>
 Date: Sat, 18 Nov 2023 10:50:36 -0500
 Subject: [PATCH] [core] Disable portable-simd test
@@ -8,18 +8,18 @@ Subject: [PATCH] [core] Disable portable-simd test
  1 file changed, 2 deletions(-)

  diff --git a/library/core/tests/lib.rs b/library/core/tests/lib.rs
- index d0a119c..76fdece 100644
+ index b71786c..cf484d5 100644
  --- a/library/core/tests/lib.rs
  +++ b/library/core/tests/lib.rs
- @@ -89,7 +89,6 @@
+ @@ -95,7 +95,6 @@
  #![feature(never_type)]
  #![feature(unwrap_infallible)]
  #![feature(pointer_is_aligned_to)]
  -#![feature(portable_simd)]
  #![feature(ptr_metadata)]
- #![feature(lazy_cell)]
  #![feature(unsized_tuple_coercion)]
- @@ -155,7 +154,6 @@ mod pin;
+ #![feature(const_option)]
+ @@ -157,7 +156,6 @@ mod pin;
  mod pin_macro;
  mod ptr;
  mod result;
@@ -28,5 +28,4 @@ index d0a119c..76fdece 100644
  mod str;
  mod str_lossy;
  --
--2.42.1
+-2.45.2

@@ -1,3 +1,3 @@
 [toolchain]
-channel = "nightly-2024-03-05"
+channel = "nightly-2024-07-02"
 components = ["rust-src", "rustc-dev", "llvm-tools-preview"]

@@ -4,6 +4,7 @@ use gccjit::{ToLValue, ToRValue, Type};
 use rustc_codegen_ssa::traits::{AbiBuilderMethods, BaseTypeMethods};
 use rustc_data_structures::fx::FxHashSet;
 use rustc_middle::bug;
+use rustc_middle::ty::layout::LayoutOf;
 use rustc_middle::ty::Ty;
 #[cfg(feature = "master")]
 use rustc_session::config;
@@ -184,9 +185,17 @@ impl<'gcc, 'tcx> FnAbiGccExt<'gcc, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
             }
             PassMode::Indirect { attrs, meta_attrs: Some(meta_attrs), on_stack } => {
                 assert!(!on_stack);
-                let ty =
-                    apply_attrs(cx.type_ptr_to(arg.memory_ty(cx)), &attrs, argument_tys.len());
-                apply_attrs(ty, &meta_attrs, argument_tys.len())
+                // Construct the type of a (wide) pointer to `ty`, and pass its two fields.
+                // Any two ABI-compatible unsized types have the same metadata type and
+                // moreover the same metadata value leads to the same dynamic size and
+                // alignment, so this respects ABI compatibility.
+                let ptr_ty = Ty::new_mut_ptr(cx.tcx, arg.layout.ty);
+                let ptr_layout = cx.layout_of(ptr_ty);
+                let typ1 = ptr_layout.scalar_pair_element_gcc_type(cx, 0);
+                let typ2 = ptr_layout.scalar_pair_element_gcc_type(cx, 1);
+                argument_tys.push(apply_attrs(typ1, &attrs, argument_tys.len()));
+                argument_tys.push(apply_attrs(typ2, &meta_attrs, argument_tys.len()));
+                continue;
             }
         };
         argument_tys.push(arg_ty);
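As a plain-Rust illustration of the comment above, a wide pointer really is a pair of values — a data pointer plus metadata — which is exactly what the new code passes as two separate arguments:

```rust
fn main() {
    let bytes: &[u8] = &[1, 2, 3, 4];
    // A &[u8] is a wide pointer: the first field is the data pointer, the second is the
    // metadata (for slices, the length). These are the two values passed separately above.
    let data_ptr: *const u8 = bytes.as_ptr();
    let metadata: usize = bytes.len();
    println!("data pointer: {data_ptr:?}, metadata (length): {metadata}");
}
```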
@@ -115,7 +115,7 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
         span: &[Span],
         instance: Instance<'_>,
         dest: Option<Self::BasicBlock>,
-        _catch_funclet: Option<(Self::BasicBlock, Option<&Self::Funclet>)>,
+        _dest_catch_funclet: Option<(Self::BasicBlock, Option<&Self::Funclet>)>,
     ) {
         if options.contains(InlineAsmOptions::MAY_UNWIND) {
             self.sess().dcx().create_err(UnwindingInlineAsm { span: span[0] }).emit();
@@ -485,9 +485,8 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
                 }

                 InlineAsmOperandRef::Label { label } => {
-                    let label_gcc_index = labels.iter()
-                        .position(|&l| l == label)
-                        .expect("wrong rust index");
+                    let label_gcc_index =
+                        labels.iter().position(|&l| l == label).expect("wrong rust index");
                     let gcc_index = label_gcc_index + outputs.len() + inputs.len();
                     push_to_template(Some('l'), gcc_index);
                 }
@@ -538,9 +537,8 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
         }
         if dest.is_none() && options.contains(InlineAsmOptions::NORETURN) {
             let builtin_unreachable = self.context.get_builtin_function("__builtin_unreachable");
-            let builtin_unreachable: RValue<'gcc> = unsafe {
-                std::mem::transmute(builtin_unreachable)
-            };
+            let builtin_unreachable: RValue<'gcc> =
+                unsafe { std::mem::transmute(builtin_unreachable) };
             self.call(self.type_void(), None, None, builtin_unreachable, &[], None, None);
         }

|
||||||
fn dummy_output_type<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, reg: InlineAsmRegClass) -> Type<'gcc> {
|
fn dummy_output_type<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, reg: InlineAsmRegClass) -> Type<'gcc> {
|
||||||
match reg {
|
match reg {
|
||||||
InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::reg) => cx.type_i32(),
|
InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::reg) => cx.type_i32(),
|
||||||
InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::preg) => unimplemented!(),
|
|
||||||
InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::vreg)
|
InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::vreg)
|
||||||
| InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::vreg_low16) => {
|
| InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::vreg_low16) => {
|
||||||
unimplemented!()
|
cx.type_vector(cx.type_i64(), 2)
|
||||||
|
}
|
||||||
|
InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::preg) => {
|
||||||
|
unreachable!("clobber-only")
|
||||||
}
|
}
|
||||||
InlineAsmRegClass::Arm(ArmInlineAsmRegClass::reg) => cx.type_i32(),
|
InlineAsmRegClass::Arm(ArmInlineAsmRegClass::reg) => cx.type_i32(),
|
||||||
InlineAsmRegClass::Arm(ArmInlineAsmRegClass::sreg)
|
InlineAsmRegClass::Arm(ArmInlineAsmRegClass::sreg)
|
||||||
|
@ -710,21 +710,13 @@ fn dummy_output_type<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, reg: InlineAsmRegCl
|
||||||
InlineAsmRegClass::Arm(ArmInlineAsmRegClass::qreg)
|
InlineAsmRegClass::Arm(ArmInlineAsmRegClass::qreg)
|
||||||
| InlineAsmRegClass::Arm(ArmInlineAsmRegClass::qreg_low8)
|
| InlineAsmRegClass::Arm(ArmInlineAsmRegClass::qreg_low8)
|
||||||
| InlineAsmRegClass::Arm(ArmInlineAsmRegClass::qreg_low4) => {
|
| InlineAsmRegClass::Arm(ArmInlineAsmRegClass::qreg_low4) => {
|
||||||
unimplemented!()
|
cx.type_vector(cx.type_i64(), 2)
|
||||||
}
|
}
|
||||||
InlineAsmRegClass::Avr(_) => unimplemented!(),
|
|
||||||
InlineAsmRegClass::Bpf(_) => unimplemented!(),
|
|
||||||
InlineAsmRegClass::Hexagon(HexagonInlineAsmRegClass::reg) => cx.type_i32(),
|
InlineAsmRegClass::Hexagon(HexagonInlineAsmRegClass::reg) => cx.type_i32(),
|
||||||
InlineAsmRegClass::LoongArch(LoongArchInlineAsmRegClass::reg) => cx.type_i32(),
|
InlineAsmRegClass::LoongArch(LoongArchInlineAsmRegClass::reg) => cx.type_i32(),
|
||||||
InlineAsmRegClass::LoongArch(LoongArchInlineAsmRegClass::freg) => cx.type_f32(),
|
InlineAsmRegClass::LoongArch(LoongArchInlineAsmRegClass::freg) => cx.type_f32(),
|
||||||
InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg) => cx.type_i32(),
|
|
||||||
InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg_addr) => cx.type_i32(),
|
|
||||||
InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg_data) => cx.type_i32(),
|
|
||||||
InlineAsmRegClass::CSKY(CSKYInlineAsmRegClass::reg) => cx.type_i32(),
|
|
||||||
InlineAsmRegClass::CSKY(CSKYInlineAsmRegClass::freg) => cx.type_f32(),
|
|
||||||
InlineAsmRegClass::Mips(MipsInlineAsmRegClass::reg) => cx.type_i32(),
|
InlineAsmRegClass::Mips(MipsInlineAsmRegClass::reg) => cx.type_i32(),
|
||||||
InlineAsmRegClass::Mips(MipsInlineAsmRegClass::freg) => cx.type_f32(),
|
InlineAsmRegClass::Mips(MipsInlineAsmRegClass::freg) => cx.type_f32(),
|
||||||
InlineAsmRegClass::Msp430(_) => unimplemented!(),
|
|
||||||
InlineAsmRegClass::Nvptx(NvptxInlineAsmRegClass::reg16) => cx.type_i16(),
|
InlineAsmRegClass::Nvptx(NvptxInlineAsmRegClass::reg16) => cx.type_i16(),
|
||||||
InlineAsmRegClass::Nvptx(NvptxInlineAsmRegClass::reg32) => cx.type_i32(),
|
InlineAsmRegClass::Nvptx(NvptxInlineAsmRegClass::reg32) => cx.type_i32(),
|
||||||
InlineAsmRegClass::Nvptx(NvptxInlineAsmRegClass::reg64) => cx.type_i64(),
|
InlineAsmRegClass::Nvptx(NvptxInlineAsmRegClass::reg64) => cx.type_i64(),
|
||||||
|
@ -737,26 +729,43 @@ fn dummy_output_type<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, reg: InlineAsmRegCl
|
||||||
}
|
}
|
||||||
InlineAsmRegClass::RiscV(RiscVInlineAsmRegClass::reg) => cx.type_i32(),
|
InlineAsmRegClass::RiscV(RiscVInlineAsmRegClass::reg) => cx.type_i32(),
|
||||||
InlineAsmRegClass::RiscV(RiscVInlineAsmRegClass::freg) => cx.type_f32(),
|
InlineAsmRegClass::RiscV(RiscVInlineAsmRegClass::freg) => cx.type_f32(),
|
||||||
InlineAsmRegClass::RiscV(RiscVInlineAsmRegClass::vreg) => cx.type_f32(),
|
InlineAsmRegClass::RiscV(RiscVInlineAsmRegClass::vreg) => {
|
||||||
|
unreachable!("clobber-only")
|
||||||
|
}
|
||||||
InlineAsmRegClass::X86(X86InlineAsmRegClass::reg)
|
InlineAsmRegClass::X86(X86InlineAsmRegClass::reg)
|
||||||
| InlineAsmRegClass::X86(X86InlineAsmRegClass::reg_abcd) => cx.type_i32(),
|
| InlineAsmRegClass::X86(X86InlineAsmRegClass::reg_abcd) => cx.type_i32(),
|
||||||
InlineAsmRegClass::X86(X86InlineAsmRegClass::reg_byte) => cx.type_i8(),
|
InlineAsmRegClass::X86(X86InlineAsmRegClass::reg_byte) => cx.type_i8(),
|
||||||
InlineAsmRegClass::X86(X86InlineAsmRegClass::mmx_reg) => unimplemented!(),
|
|
||||||
InlineAsmRegClass::X86(X86InlineAsmRegClass::xmm_reg)
|
InlineAsmRegClass::X86(X86InlineAsmRegClass::xmm_reg)
|
||||||
| InlineAsmRegClass::X86(X86InlineAsmRegClass::ymm_reg)
|
| InlineAsmRegClass::X86(X86InlineAsmRegClass::ymm_reg)
|
||||||
| InlineAsmRegClass::X86(X86InlineAsmRegClass::zmm_reg) => cx.type_f32(),
|
| InlineAsmRegClass::X86(X86InlineAsmRegClass::zmm_reg) => cx.type_f32(),
|
||||||
InlineAsmRegClass::X86(X86InlineAsmRegClass::x87_reg) => unimplemented!(),
|
|
||||||
InlineAsmRegClass::X86(X86InlineAsmRegClass::kreg) => cx.type_i16(),
|
InlineAsmRegClass::X86(X86InlineAsmRegClass::kreg) => cx.type_i16(),
|
||||||
InlineAsmRegClass::X86(X86InlineAsmRegClass::kreg0) => cx.type_i16(),
|
InlineAsmRegClass::X86(X86InlineAsmRegClass::x87_reg)
|
||||||
InlineAsmRegClass::X86(X86InlineAsmRegClass::tmm_reg) => unimplemented!(),
|
| InlineAsmRegClass::X86(X86InlineAsmRegClass::mmx_reg)
|
||||||
InlineAsmRegClass::Wasm(WasmInlineAsmRegClass::local) => cx.type_i32(),
|
| InlineAsmRegClass::X86(X86InlineAsmRegClass::kreg0)
|
||||||
InlineAsmRegClass::SpirV(SpirVInlineAsmRegClass::reg) => {
|
| InlineAsmRegClass::X86(X86InlineAsmRegClass::tmm_reg) => {
|
||||||
bug!("LLVM backend does not support SPIR-V")
|
unreachable!("clobber-only")
|
||||||
}
|
}
|
||||||
|
InlineAsmRegClass::Wasm(WasmInlineAsmRegClass::local) => cx.type_i32(),
|
||||||
|
InlineAsmRegClass::Bpf(BpfInlineAsmRegClass::reg) => cx.type_i64(),
|
||||||
|
InlineAsmRegClass::Bpf(BpfInlineAsmRegClass::wreg) => cx.type_i32(),
|
||||||
|
InlineAsmRegClass::Avr(AvrInlineAsmRegClass::reg) => cx.type_i8(),
|
||||||
|
InlineAsmRegClass::Avr(AvrInlineAsmRegClass::reg_upper) => cx.type_i8(),
|
||||||
|
InlineAsmRegClass::Avr(AvrInlineAsmRegClass::reg_pair) => cx.type_i16(),
|
||||||
|
InlineAsmRegClass::Avr(AvrInlineAsmRegClass::reg_iw) => cx.type_i16(),
|
||||||
|
InlineAsmRegClass::Avr(AvrInlineAsmRegClass::reg_ptr) => cx.type_i16(),
|
||||||
InlineAsmRegClass::S390x(
|
InlineAsmRegClass::S390x(
|
||||||
S390xInlineAsmRegClass::reg | S390xInlineAsmRegClass::reg_addr,
|
S390xInlineAsmRegClass::reg | S390xInlineAsmRegClass::reg_addr,
|
||||||
) => cx.type_i32(),
|
) => cx.type_i32(),
|
||||||
InlineAsmRegClass::S390x(S390xInlineAsmRegClass::freg) => cx.type_f64(),
|
InlineAsmRegClass::S390x(S390xInlineAsmRegClass::freg) => cx.type_f64(),
|
||||||
|
InlineAsmRegClass::Msp430(Msp430InlineAsmRegClass::reg) => cx.type_i16(),
|
||||||
|
InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg) => cx.type_i32(),
|
||||||
|
InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg_addr) => cx.type_i32(),
|
||||||
|
InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg_data) => cx.type_i32(),
|
||||||
|
InlineAsmRegClass::CSKY(CSKYInlineAsmRegClass::reg) => cx.type_i32(),
|
||||||
|
InlineAsmRegClass::CSKY(CSKYInlineAsmRegClass::freg) => cx.type_f32(),
|
||||||
|
InlineAsmRegClass::SpirV(SpirVInlineAsmRegClass::reg) => {
|
||||||
|
bug!("GCC backend does not support SPIR-V")
|
||||||
|
}
|
||||||
InlineAsmRegClass::Err => unreachable!(),
|
InlineAsmRegClass::Err => unreachable!(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -92,7 +92,7 @@ pub fn from_fn_attrs<'gcc, 'tcx>(
     let mut function_features = function_features
         .iter()
         .flat_map(|feat| to_gcc_features(cx.tcx.sess, feat).into_iter())
-        .chain(codegen_fn_attrs.instruction_set.iter().map(|x| match x {
+        .chain(codegen_fn_attrs.instruction_set.iter().map(|x| match *x {
             InstructionSetAttr::ArmA32 => "-thumb-mode", // TODO(antoyo): support removing feature.
             InstructionSetAttr::ArmT32 => "thumb-mode",
         }))
@@ -118,8 +118,8 @@ pub fn from_fn_attrs<'gcc, 'tcx>(

             if feature.starts_with('-') {
                 Some(format!("no{}", feature))
-            } else if feature.starts_with('+') {
-                Some(feature[1..].to_string())
+            } else if let Some(stripped) = feature.strip_prefix('+') {
+                Some(stripped.to_string())
             } else {
                 Some(feature.to_string())
             }
@@ -128,6 +128,12 @@ pub fn from_fn_attrs<'gcc, 'tcx>(
         .join(",");
     if !target_features.is_empty() {
         #[cfg(feature = "master")]
-        func.add_attribute(FnAttribute::Target(&target_features));
+        match cx.sess().target.arch.as_ref() {
+            "x86" | "x86_64" | "powerpc" => {
+                func.add_attribute(FnAttribute::Target(&target_features))
+            }
+            // The target attribute is not supported on other targets in GCC.
+            _ => (),
+        }
     }
 }

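A minimal standalone sketch of the feature-string mapping shown above — a leading `-` becomes a `no…` feature, a leading `+` is stripped, anything else passes through (the feature names here are made up for the example, and the real backend operates on rustc's computed feature lists):

```rust
// Illustrative mapping only; mirrors the branches in the diff above.
fn to_gcc_feature(feature: &str) -> String {
    if feature.starts_with('-') {
        format!("no{}", feature)
    } else if let Some(stripped) = feature.strip_prefix('+') {
        stripped.to_string()
    } else {
        feature.to_string()
    }
}

fn main() {
    for feature in ["+avx2", "-thumb-mode", "crt-static"] {
        println!("{feature} -> {}", to_gcc_feature(feature));
    }
}
```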
@ -16,13 +16,14 @@
|
||||||
// /usr/bin/ld: warning: type of symbol `_RNvNvNvNtCs5JWOrf9uCus_5rayon11thread_pool19WORKER_THREAD_STATE7___getit5___KEY' changed from 1 to 6 in /tmp/ccKeUSiR.ltrans0.ltrans.o
|
// /usr/bin/ld: warning: type of symbol `_RNvNvNvNtCs5JWOrf9uCus_5rayon11thread_pool19WORKER_THREAD_STATE7___getit5___KEY' changed from 1 to 6 in /tmp/ccKeUSiR.ltrans0.ltrans.o
|
||||||
// /usr/bin/ld: warning: type of symbol `_RNvNvNvNvNtNtNtCsAj5i4SGTR7_3std4sync4mpmc5waker17current_thread_id5DUMMY7___getit5___KEY' changed from 1 to 6 in /tmp/ccKeUSiR.ltrans0.ltrans.o
|
// /usr/bin/ld: warning: type of symbol `_RNvNvNvNvNtNtNtCsAj5i4SGTR7_3std4sync4mpmc5waker17current_thread_id5DUMMY7___getit5___KEY' changed from 1 to 6 in /tmp/ccKeUSiR.ltrans0.ltrans.o
|
||||||
// /usr/bin/ld: warning: incremental linking of LTO and non-LTO objects; using -flinker-output=nolto-rel which will bypass whole program optimization
|
// /usr/bin/ld: warning: incremental linking of LTO and non-LTO objects; using -flinker-output=nolto-rel which will bypass whole program optimization
|
||||||
use std::ffi::CString;
|
use std::ffi::{CStr, CString};
|
||||||
use std::fs::{self, File};
|
use std::fs::{self, File};
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
use gccjit::OutputKind;
|
use gccjit::{Context, OutputKind};
|
||||||
use object::read::archive::ArchiveFile;
|
use object::read::archive::ArchiveFile;
|
||||||
use rustc_codegen_ssa::back::lto::{LtoModuleCodegen, SerializedModule};
|
use rustc_codegen_ssa::back::lto::{LtoModuleCodegen, SerializedModule, ThinModule, ThinShared};
|
||||||
use rustc_codegen_ssa::back::symbol_export;
|
use rustc_codegen_ssa::back::symbol_export;
|
||||||
use rustc_codegen_ssa::back::write::{CodegenContext, FatLtoInput};
|
use rustc_codegen_ssa::back::write::{CodegenContext, FatLtoInput};
|
||||||
use rustc_codegen_ssa::traits::*;
|
use rustc_codegen_ssa::traits::*;
|
||||||
|
@ -30,6 +31,7 @@ use rustc_codegen_ssa::{looks_like_rust_object_file, ModuleCodegen, ModuleKind};
|
||||||
use rustc_data_structures::memmap::Mmap;
|
use rustc_data_structures::memmap::Mmap;
|
||||||
use rustc_errors::{DiagCtxtHandle, FatalError};
|
use rustc_errors::{DiagCtxtHandle, FatalError};
|
||||||
use rustc_hir::def_id::LOCAL_CRATE;
|
use rustc_hir::def_id::LOCAL_CRATE;
|
||||||
|
use rustc_middle::bug;
|
||||||
use rustc_middle::dep_graph::WorkProduct;
|
use rustc_middle::dep_graph::WorkProduct;
|
||||||
use rustc_middle::middle::exported_symbols::{SymbolExportInfo, SymbolExportLevel};
|
use rustc_middle::middle::exported_symbols::{SymbolExportInfo, SymbolExportLevel};
|
||||||
use rustc_session::config::{CrateType, Lto};
|
use rustc_session::config::{CrateType, Lto};
|
||||||
|
@ -37,7 +39,7 @@ use tempfile::{tempdir, TempDir};
|
||||||
|
|
||||||
use crate::back::write::save_temp_bitcode;
|
use crate::back::write::save_temp_bitcode;
|
||||||
use crate::errors::{DynamicLinkingWithLTO, LtoBitcodeFromRlib, LtoDisallowed, LtoDylib};
|
use crate::errors::{DynamicLinkingWithLTO, LtoBitcodeFromRlib, LtoDisallowed, LtoDylib};
|
||||||
use crate::{to_gcc_opt_level, GccCodegenBackend, GccContext};
|
use crate::{to_gcc_opt_level, GccCodegenBackend, GccContext, SyncContext};
|
||||||
|
|
||||||
/// We keep track of the computed LTO cache keys from the previous
|
/// We keep track of the computed LTO cache keys from the previous
|
||||||
/// session to determine which CGUs we can reuse.
|
/// session to determine which CGUs we can reuse.
|
||||||
|
@ -128,8 +130,7 @@ fn prepare_lto(
|
||||||
}
|
}
|
||||||
|
|
||||||
let archive_data = unsafe {
|
let archive_data = unsafe {
|
||||||
Mmap::map(File::open(&path).expect("couldn't open rlib"))
|
Mmap::map(File::open(path).expect("couldn't open rlib")).expect("couldn't map rlib")
|
||||||
.expect("couldn't map rlib")
|
|
||||||
};
|
};
|
||||||
let archive = ArchiveFile::parse(&*archive_data).expect("wanted an rlib");
|
let archive = ArchiveFile::parse(&*archive_data).expect("wanted an rlib");
|
||||||
let obj_files = archive
|
let obj_files = archive
|
||||||
@ -349,6 +350,395 @@ impl ModuleBuffer {

impl ModuleBufferMethods for ModuleBuffer {
fn data(&self) -> &[u8] {
unimplemented!("data not needed for GCC codegen");
&[]
}
}

/// Performs thin LTO by performing necessary global analysis and returning two
/// lists, one of the modules that need optimization and another for modules that
/// can simply be copied over from the incr. comp. cache.
pub(crate) fn run_thin(
cgcx: &CodegenContext<GccCodegenBackend>,
modules: Vec<(String, ThinBuffer)>,
cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
) -> Result<(Vec<LtoModuleCodegen<GccCodegenBackend>>, Vec<WorkProduct>), FatalError> {
let dcx = cgcx.create_dcx();
let dcx = dcx.handle();
let lto_data = prepare_lto(cgcx, dcx)?;
/*let symbols_below_threshold =
symbols_below_threshold.iter().map(|c| c.as_ptr()).collect::<Vec<_>>();*/
if cgcx.opts.cg.linker_plugin_lto.enabled() {
unreachable!(
"We should never reach this case if the LTO step \
is deferred to the linker"
);
}
thin_lto(
cgcx,
dcx,
modules,
lto_data.upstream_modules,
lto_data.tmp_path,
cached_modules, /*, &symbols_below_threshold*/
)
}

pub(crate) fn prepare_thin(
module: ModuleCodegen<GccContext>,
_emit_summary: bool,
) -> (String, ThinBuffer) {
let name = module.name;
//let buffer = ThinBuffer::new(module.module_llvm.context, true, emit_summary);
let buffer = ThinBuffer::new(&module.module_llvm.context);
(name, buffer)
}

/// Prepare "thin" LTO to get run on these modules.
///
/// The general structure of ThinLTO is quite different from the structure of
/// "fat" LTO above. With "fat" LTO all LLVM modules in question are merged into
/// one giant LLVM module, and then we run more optimization passes over this
/// big module after internalizing most symbols. Thin LTO, on the other hand,
/// avoid this large bottleneck through more targeted optimization.
///
/// At a high level Thin LTO looks like:
///
/// 1. Prepare a "summary" of each LLVM module in question which describes
/// the values inside, cost of the values, etc.
/// 2. Merge the summaries of all modules in question into one "index"
/// 3. Perform some global analysis on this index
/// 4. For each module, use the index and analysis calculated previously to
/// perform local transformations on the module, for example inlining
/// small functions from other modules.
/// 5. Run thin-specific optimization passes over each module, and then code
/// generate everything at the end.
///
/// The summary for each module is intended to be quite cheap, and the global
/// index is relatively quite cheap to create as well. As a result, the goal of
/// ThinLTO is to reduce the bottleneck on LTO and enable LTO to be used in more
/// situations. For example one cheap optimization is that we can parallelize
/// all codegen modules, easily making use of all the cores on a machine.
///
/// With all that in mind, the function here is designed at specifically just
/// calculating the *index* for ThinLTO. This index will then be shared amongst
/// all of the `LtoModuleCodegen` units returned below and destroyed once
/// they all go out of scope.
fn thin_lto(
cgcx: &CodegenContext<GccCodegenBackend>,
_dcx: DiagCtxtHandle<'_>,
modules: Vec<(String, ThinBuffer)>,
serialized_modules: Vec<(SerializedModule<ModuleBuffer>, CString)>,
tmp_path: TempDir,
cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
//symbols_below_threshold: &[*const libc::c_char],
) -> Result<(Vec<LtoModuleCodegen<GccCodegenBackend>>, Vec<WorkProduct>), FatalError> {
let _timer = cgcx.prof.generic_activity("LLVM_thin_lto_global_analysis");
info!("going for that thin, thin LTO");

/*let green_modules: FxHashMap<_, _> =
cached_modules.iter().map(|(_, wp)| (wp.cgu_name.clone(), wp.clone())).collect();*/

let full_scope_len = modules.len() + serialized_modules.len() + cached_modules.len();
let mut thin_buffers = Vec::with_capacity(modules.len());
let mut module_names = Vec::with_capacity(full_scope_len);
//let mut thin_modules = Vec::with_capacity(full_scope_len);

for (i, (name, buffer)) in modules.into_iter().enumerate() {
info!("local module: {} - {}", i, name);
let cname = CString::new(name.as_bytes()).unwrap();
/*thin_modules.push(llvm::ThinLTOModule {
identifier: cname.as_ptr(),
data: buffer.data().as_ptr(),
len: buffer.data().len(),
});*/
thin_buffers.push(buffer);
module_names.push(cname);
}

// FIXME: All upstream crates are deserialized internally in the
// function below to extract their summary and modules. Note that
// unlike the loop above we *must* decode and/or read something
// here as these are all just serialized files on disk. An
// improvement, however, to make here would be to store the
// module summary separately from the actual module itself. Right
// now this is store in one large bitcode file, and the entire
// file is deflate-compressed. We could try to bypass some of the
// decompression by storing the index uncompressed and only
// lazily decompressing the bytecode if necessary.
//
// Note that truly taking advantage of this optimization will
// likely be further down the road. We'd have to implement
// incremental ThinLTO first where we could actually avoid
// looking at upstream modules entirely sometimes (the contents,
// we must always unconditionally look at the index).
let mut serialized = Vec::with_capacity(serialized_modules.len() + cached_modules.len());

let cached_modules =
cached_modules.into_iter().map(|(sm, wp)| (sm, CString::new(wp.cgu_name).unwrap()));

for (module, name) in serialized_modules.into_iter().chain(cached_modules) {
info!("upstream or cached module {:?}", name);
/*thin_modules.push(llvm::ThinLTOModule {
identifier: name.as_ptr(),
data: module.data().as_ptr(),
len: module.data().len(),
});*/

match module {
SerializedModule::Local(_) => {
//let path = module_buffer.0.to_str().expect("path");
//let my_path = PathBuf::from(path);
//let exists = my_path.exists();
/*module.module_llvm.should_combine_object_files = true;
module
.module_llvm
.context
.add_driver_option(module_buffer.0.to_str().expect("path"));*/
}
SerializedModule::FromRlib(_) => unimplemented!("from rlib"),
SerializedModule::FromUncompressedFile(_) => {
unimplemented!("from uncompressed file")
}
}

serialized.push(module);
module_names.push(name);
}

// Sanity check
//assert_eq!(thin_modules.len(), module_names.len());

// Delegate to the C++ bindings to create some data here. Once this is a
// tried-and-true interface we may wish to try to upstream some of this
// to LLVM itself, right now we reimplement a lot of what they do
// upstream...
/*let data = llvm::LLVMRustCreateThinLTOData(
thin_modules.as_ptr(),
thin_modules.len() as u32,
symbols_below_threshold.as_ptr(),
symbols_below_threshold.len() as u32,
)
.ok_or_else(|| write::llvm_err(dcx, LlvmError::PrepareThinLtoContext))?;
*/

let data = ThinData; //(Arc::new(tmp_path))/*(data)*/;

info!("thin LTO data created");

/*let (key_map_path, prev_key_map, curr_key_map) =
if let Some(ref incr_comp_session_dir) = cgcx.incr_comp_session_dir {
let path = incr_comp_session_dir.join(THIN_LTO_KEYS_INCR_COMP_FILE_NAME);
// If the previous file was deleted, or we get an IO error
// reading the file, then we'll just use `None` as the
// prev_key_map, which will force the code to be recompiled.
let prev =
if path.exists() { ThinLTOKeysMap::load_from_file(&path).ok() } else { None };
let curr = ThinLTOKeysMap::from_thin_lto_modules(&data, &thin_modules, &module_names);
(Some(path), prev, curr)
}
else {
// If we don't compile incrementally, we don't need to load the
// import data from LLVM.
assert!(green_modules.is_empty());
let curr = ThinLTOKeysMap::default();
(None, None, curr)
};
info!("thin LTO cache key map loaded");
info!("prev_key_map: {:#?}", prev_key_map);
info!("curr_key_map: {:#?}", curr_key_map);*/

// Throw our data in an `Arc` as we'll be sharing it across threads. We
// also put all memory referenced by the C++ data (buffers, ids, etc)
// into the arc as well. After this we'll create a thin module
// codegen per module in this data.
let shared =
Arc::new(ThinShared { data, thin_buffers, serialized_modules: serialized, module_names });

let copy_jobs = vec![];
let mut opt_jobs = vec![];

info!("checking which modules can be-reused and which have to be re-optimized.");
for (module_index, module_name) in shared.module_names.iter().enumerate() {
let module_name = module_name_to_str(module_name);
/*if let (Some(prev_key_map), true) =
(prev_key_map.as_ref(), green_modules.contains_key(module_name))
{
assert!(cgcx.incr_comp_session_dir.is_some());

// If a module exists in both the current and the previous session,
// and has the same LTO cache key in both sessions, then we can re-use it
if prev_key_map.keys.get(module_name) == curr_key_map.keys.get(module_name) {
let work_product = green_modules[module_name].clone();
copy_jobs.push(work_product);
info!(" - {}: re-used", module_name);
assert!(cgcx.incr_comp_session_dir.is_some());
continue;
}
}*/

info!(" - {}: re-compiled", module_name);
opt_jobs
.push(LtoModuleCodegen::Thin(ThinModule { shared: shared.clone(), idx: module_index }));
}

// Save the current ThinLTO import information for the next compilation
// session, overwriting the previous serialized data (if any).
/*if let Some(path) = key_map_path {
if let Err(err) = curr_key_map.save_to_file(&path) {
return Err(write::llvm_err(dcx, LlvmError::WriteThinLtoKey { err }));
}
}*/

// NOTE: save the temporary directory used by LTO so that it gets deleted after linking instead
// of now.
//module.module_llvm.temp_dir = Some(tmp_path);
// TODO: save the directory so that it gets deleted later.
std::mem::forget(tmp_path);

Ok((opt_jobs, copy_jobs))
}

pub unsafe fn optimize_thin_module(
thin_module: ThinModule<GccCodegenBackend>,
_cgcx: &CodegenContext<GccCodegenBackend>,
) -> Result<ModuleCodegen<GccContext>, FatalError> {
//let dcx = cgcx.create_dcx();

//let module_name = &thin_module.shared.module_names[thin_module.idx];
/*let tm_factory_config = TargetMachineFactoryConfig::new(cgcx, module_name.to_str().unwrap());
let tm = (cgcx.tm_factory)(tm_factory_config).map_err(|e| write::llvm_err(&dcx, e))?;*/

// Right now the implementation we've got only works over serialized
// modules, so we create a fresh new LLVM context and parse the module
// into that context. One day, however, we may do this for upstream
// crates but for locally codegened modules we may be able to reuse
// that LLVM Context and Module.
//let llcx = llvm::LLVMRustContextCreate(cgcx.fewer_names);
//let llmod_raw = parse_module(llcx, module_name, thin_module.data(), &dcx)? as *const _;
let mut should_combine_object_files = false;
let context = match thin_module.shared.thin_buffers.get(thin_module.idx) {
Some(thin_buffer) => Arc::clone(&thin_buffer.context),
None => {
let context = Context::default();
let len = thin_module.shared.thin_buffers.len();
let module = &thin_module.shared.serialized_modules[thin_module.idx - len];
match *module {
SerializedModule::Local(ref module_buffer) => {
let path = module_buffer.0.to_str().expect("path");
context.add_driver_option(path);
should_combine_object_files = true;
/*module.module_llvm.should_combine_object_files = true;
module
.module_llvm
.context
.add_driver_option(module_buffer.0.to_str().expect("path"));*/
}
SerializedModule::FromRlib(_) => unimplemented!("from rlib"),
SerializedModule::FromUncompressedFile(_) => {
unimplemented!("from uncompressed file")
}
}
Arc::new(SyncContext::new(context))
}
};
let module = ModuleCodegen {
module_llvm: GccContext { context, should_combine_object_files, temp_dir: None },
name: thin_module.name().to_string(),
kind: ModuleKind::Regular,
};
/*{
let target = &*module.module_llvm.tm;
let llmod = module.module_llvm.llmod();
save_temp_bitcode(cgcx, &module, "thin-lto-input");

// Up next comes the per-module local analyses that we do for Thin LTO.
// Each of these functions is basically copied from the LLVM
// implementation and then tailored to suit this implementation. Ideally
// each of these would be supported by upstream LLVM but that's perhaps
// a patch for another day!
//
// You can find some more comments about these functions in the LLVM
// bindings we've got (currently `PassWrapper.cpp`)
{
let _timer =
cgcx.prof.generic_activity_with_arg("LLVM_thin_lto_rename", thin_module.name());
if !llvm::LLVMRustPrepareThinLTORename(thin_module.shared.data.0, llmod, target) {
return Err(write::llvm_err(&dcx, LlvmError::PrepareThinLtoModule));
}
save_temp_bitcode(cgcx, &module, "thin-lto-after-rename");
}

{
let _timer = cgcx
.prof
.generic_activity_with_arg("LLVM_thin_lto_resolve_weak", thin_module.name());
if !llvm::LLVMRustPrepareThinLTOResolveWeak(thin_module.shared.data.0, llmod) {
return Err(write::llvm_err(&dcx, LlvmError::PrepareThinLtoModule));
}
save_temp_bitcode(cgcx, &module, "thin-lto-after-resolve");
}

{
let _timer = cgcx
.prof
.generic_activity_with_arg("LLVM_thin_lto_internalize", thin_module.name());
if !llvm::LLVMRustPrepareThinLTOInternalize(thin_module.shared.data.0, llmod) {
return Err(write::llvm_err(&dcx, LlvmError::PrepareThinLtoModule));
}
save_temp_bitcode(cgcx, &module, "thin-lto-after-internalize");
}

{
let _timer =
cgcx.prof.generic_activity_with_arg("LLVM_thin_lto_import", thin_module.name());
if !llvm::LLVMRustPrepareThinLTOImport(thin_module.shared.data.0, llmod, target) {
return Err(write::llvm_err(&dcx, LlvmError::PrepareThinLtoModule));
}
save_temp_bitcode(cgcx, &module, "thin-lto-after-import");
}

// Alright now that we've done everything related to the ThinLTO
// analysis it's time to run some optimizations! Here we use the same
// `run_pass_manager` as the "fat" LTO above except that we tell it to
// populate a thin-specific pass manager, which presumably LLVM treats a
// little differently.
{
info!("running thin lto passes over {}", module.name);
run_pass_manager(cgcx, &dcx, &mut module, true)?;
save_temp_bitcode(cgcx, &module, "thin-lto-after-pm");
}
}*/
Ok(module)
}

pub struct ThinBuffer {
context: Arc<SyncContext>,
}

// TODO: check if this makes sense to make ThinBuffer Send and Sync.
unsafe impl Send for ThinBuffer {}
unsafe impl Sync for ThinBuffer {}

impl ThinBuffer {
pub(crate) fn new(context: &Arc<SyncContext>) -> Self {
Self { context: Arc::clone(context) }
}
}

impl ThinBufferMethods for ThinBuffer {
fn data(&self) -> &[u8] {
&[]
}

fn thin_link_data(&self) -> &[u8] {
unimplemented!();
}
}

pub struct ThinData; //(Arc<TempDir>);

fn module_name_to_str(c_str: &CStr) -> &str {
c_str.to_str().unwrap_or_else(|e| {
bug!("Encountered non-utf8 GCC module name `{}`: {}", c_str.to_string_lossy(), e)
})
}

@ -31,6 +31,7 @@ pub(crate) unsafe fn codegen(

// NOTE: Only generate object files with GIMPLE when this environment variable is set for
// now because this requires a particular setup (same gcc/lto1/lto-wrapper commit as libgccjit).
// TODO: remove this environment variable.
let fat_lto = env::var("EMBED_LTO_BITCODE").as_deref() == Ok("1");

let bc_out = cgcx.output_filenames.temp_path(OutputType::Bitcode, module_name);

@ -56,6 +57,8 @@ pub(crate) unsafe fn codegen(
.generic_activity_with_arg("GCC_module_codegen_emit_bitcode", &*module.name);
context.add_command_line_option("-flto=auto");
context.add_command_line_option("-flto-partition=one");
// TODO: remove since we don't want fat objects when it is for Bitcode only.
context.add_command_line_option("-ffat-lto-objects");
context
.compile_to_file(OutputKind::ObjectFile, bc_out.to_str().expect("path to str"));
}

@ -104,7 +107,7 @@ pub(crate) unsafe fn codegen(
// FIXME(antoyo): segfault in dump_reproducer_to_file() might be caused by
// transmuting an rvalue to an lvalue.
// Segfault is actually in gcc::jit::reproducer::get_identifier_as_lvalue
context.dump_reproducer_to_file(&format!("/tmp/reproducers/{}.c", module.name));
context.dump_reproducer_to_file(format!("/tmp/reproducers/{}.c", module.name));
println!("Dumped reproducer {}", module.name);
}
if env::var("CG_GCCJIT_DUMP_TO_FILE").as_deref() == Ok("1") {

@ -113,17 +116,20 @@ pub(crate) unsafe fn codegen(
context.set_debug_info(true);
context.dump_to_file(path, true);
}
if should_combine_object_files && fat_lto {
if should_combine_object_files {
context.add_command_line_option("-flto=auto");
if fat_lto {
context.add_command_line_option("-flto-partition=one");
context.add_command_line_option("-flto=auto");
context.add_command_line_option("-flto-partition=one");

// NOTE: without -fuse-linker-plugin, we get the following error:
// lto1: internal compiler error: decompressed stream: Destination buffer is too small
context.add_driver_option("-fuse-linker-plugin");
}

context.add_driver_option("-Wl,-r");
// NOTE: we need -nostdlib, otherwise, we get the following error:
// /usr/bin/ld: cannot find -lgcc_s: No such file or directory
context.add_driver_option("-nostdlib");
// NOTE: without -fuse-linker-plugin, we get the following error:
// lto1: internal compiler error: decompressed stream: Destination buffer is too small
context.add_driver_option("-fuse-linker-plugin");

// NOTE: this doesn't actually generate an executable. With the above flags, it combines the .o files together in another .o.
context.compile_to_file(

@ -1,8 +1,9 @@
use std::collections::HashSet;
use std::env;
use std::sync::Arc;
use std::time::Instant;

use gccjit::{FunctionType, GlobalKind};
use gccjit::{CType, FunctionType, GlobalKind};
use rustc_codegen_ssa::base::maybe_create_entry_wrapper;
use rustc_codegen_ssa::mono_item::MonoItemExt;
use rustc_codegen_ssa::traits::DebugInfoMethods;

@ -18,8 +19,8 @@ use rustc_target::spec::PanicStrategy;

use crate::builder::Builder;
use crate::context::CodegenCx;
use crate::GccContext;
use crate::{gcc_util, new_context, LockedTargetInfo};
use crate::{GccContext, SyncContext};

#[cfg(feature = "master")]
pub fn visibility_to_gcc(linkage: Visibility) -> gccjit::Visibility {

@ -135,7 +136,7 @@ pub fn compile_codegen_unit(

let target_cpu = gcc_util::target_cpu(tcx.sess);
if target_cpu != "generic" {
context.add_command_line_option(&format!("-march={}", target_cpu));
context.add_command_line_option(format!("-march={}", target_cpu));
}

if tcx

@ -181,7 +182,24 @@ pub fn compile_codegen_unit(
context.set_allow_unreachable_blocks(true);

{
let cx = CodegenCx::new(&context, cgu, tcx, target_info.supports_128bit_int());
// TODO: to make it less error-prone (calling get_target_info() will add the flag
// -fsyntax-only), forbid the compilation when get_target_info() is called on a
// context.
let f16_type_supported = target_info.supports_target_dependent_type(CType::Float16);
let f32_type_supported = target_info.supports_target_dependent_type(CType::Float32);
let f64_type_supported = target_info.supports_target_dependent_type(CType::Float64);
let f128_type_supported = target_info.supports_target_dependent_type(CType::Float128);
// TODO: improve this to avoid passing that many arguments.
let cx = CodegenCx::new(
&context,
cgu,
tcx,
target_info.supports_128bit_int(),
f16_type_supported,
f32_type_supported,
f64_type_supported,
f128_type_supported,
);

let mono_items = cgu.items_in_deterministic_order(tcx);
for &(mono_item, data) in &mono_items {

@ -205,7 +223,11 @@ pub fn compile_codegen_unit(

ModuleCodegen {
name: cgu_name.to_string(),
module_llvm: GccContext { context, should_combine_object_files: false, temp_dir: None },
module_llvm: GccContext {
context: Arc::new(SyncContext::new(context)),
should_combine_object_files: false,
temp_dir: None,
},
kind: ModuleKind::Regular,
}
}

@ -25,7 +25,7 @@ use rustc_middle::ty::layout::{
FnAbiError, FnAbiOfHelpers, FnAbiRequest, HasParamEnv, HasTyCtxt, LayoutError, LayoutOfHelpers,
TyAndLayout,
};
use rustc_middle::ty::{ParamEnv, Ty, TyCtxt, Instance};
use rustc_middle::ty::{Instance, ParamEnv, Ty, TyCtxt};
use rustc_span::def_id::DefId;
use rustc_span::Span;
use rustc_target::abi::{

@ -68,7 +68,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
src: RValue<'gcc>,
order: AtomicOrdering,
) -> RValue<'gcc> {
let size = src.get_type().get_size();
let size = get_maybe_pointer_size(src);

let func = self.current_func();

@ -138,7 +138,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
failure_order: AtomicOrdering,
weak: bool,
) -> RValue<'gcc> {
let size = src.get_type().get_size();
let size = get_maybe_pointer_size(src);
let compare_exchange =
self.context.get_builtin_function(&format!("__atomic_compare_exchange_{}", size));
let order = self.context.new_rvalue_from_int(self.i32_type, order.to_gcc());

@ -153,7 +153,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {

// NOTE: not sure why, but we have the wrong type here.
let int_type = compare_exchange.get_param(2).to_rvalue().get_type();
let src = self.context.new_cast(self.location, src, int_type);
let src = self.context.new_bitcast(self.location, src, int_type);
self.context.new_call(
self.location,
compare_exchange,

@ -190,8 +190,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
let casted_args: Vec<_> = param_types
.into_iter()
.zip(args.iter())
.enumerate()
.map(|(expected_ty, &actual_val)| {
.map(|(_i, (expected_ty, &actual_val))| {
let actual_ty = actual_val.get_type();
if expected_ty != actual_ty {
self.bitcast(actual_val, expected_ty)

@ -225,7 +224,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {

let mut on_stack_param_indices = FxHashSet::default();
if let Some(indices) = self.on_stack_params.borrow().get(&gcc_func) {
on_stack_param_indices = indices.clone();
on_stack_param_indices.clone_from(indices);
}

if all_args_match {

@ -253,11 +252,26 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
{
self.context.new_cast(self.location, actual_val, expected_ty)
} else if on_stack_param_indices.contains(&index) {
actual_val.dereference(self.location).to_rvalue()
let ty = actual_val.get_type();
// It's possible that the value behind the pointer is actually not exactly
// the expected type, so to go around that, we add a cast before
// dereferencing the value.
if let Some(pointee_val) = ty.get_pointee()
&& pointee_val != expected_ty
{
let new_val = self.context.new_cast(
self.location,
actual_val,
expected_ty.make_pointer(),
);
new_val.dereference(self.location).to_rvalue()
} else {
actual_val.dereference(self.location).to_rvalue()
}
} else {
assert!(
!((actual_ty.is_vector() && !expected_ty.is_vector())
(!expected_ty.is_vector() || actual_ty.is_vector())
|| (!actual_ty.is_vector() && expected_ty.is_vector())),
&& (expected_ty.is_vector() || !actual_ty.is_vector()),
"{:?} ({}) -> {:?} ({}), index: {:?}[{}]",
actual_ty,
actual_ty.is_vector(),

@ -277,8 +291,8 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
.collect();

// NOTE: to take into account variadic functions.
for i in casted_args.len()..args.len() {
for arg in args.iter().skip(casted_args.len()) {
casted_args.push(args[i]);
casted_args.push(*arg);
}

Cow::Owned(casted_args)

@ -353,7 +367,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
let function_address_names = self.function_address_names.borrow();
let original_function_name = function_address_names.get(&func_ptr);
llvm::adjust_intrinsic_arguments(
&self,
self,
gcc_func,
args.into(),
&func_name,

@ -361,7 +375,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
)
};
let args_adjusted = args.len() != previous_arg_count;
let args = self.check_ptr_call("call", func_ptr, &*args);
let args = self.check_ptr_call("call", func_ptr, &args);

// gccjit requires to use the result of functions, even when it's not used.
// That's why we assign the result to a local or call add_eval().

@ -373,7 +387,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
unsafe { RETURN_VALUE_COUNT += 1 };
let return_value = self.cx.context.new_call_through_ptr(self.location, func_ptr, &args);
let return_value = llvm::adjust_intrinsic_return_value(
&self,
self,
return_value,
&func_name,
&args,

@ -441,7 +455,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
self.block.add_assignment(
self.location,
result,
self.cx.context.new_call(self.location, func, &args),
self.cx.context.new_call(self.location, func, args),
);
result.to_rvalue()
}

@ -596,7 +610,7 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
) -> RValue<'gcc> {
let try_block = self.current_func().new_block("try");

let current_block = self.block.clone();
let current_block = self.block;
self.block = try_block;
let call = self.call(typ, fn_attrs, None, func, args, None, instance); // TODO(antoyo): use funclet here?
self.block = current_block;

@ -630,8 +644,9 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
then: Block<'gcc>,
catch: Block<'gcc>,
_funclet: Option<&Funclet>,
instance: Option<Instance<'tcx>>,
) -> RValue<'gcc> {
let call_site = self.call(typ, fn_attrs, None, func, args, None);
let call_site = self.call(typ, fn_attrs, None, func, args, None, instance);
let condition = self.context.new_rvalue_from_int(self.bool_type, 1);
self.llbb().end_with_conditional(self.location, condition, then, catch);
if let Some(_fn_abi) = fn_abi {

@ -749,6 +764,24 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
// FIXME(antoyo): this seems to produce the wrong result.
return self.context.new_call(self.location, fmodf, &[a, b]);
}

#[cfg(feature = "master")]
match self.cx.type_kind(a_type) {
TypeKind::Half | TypeKind::Float => {
let fmodf = self.context.get_builtin_function("fmodf");
return self.context.new_call(self.location, fmodf, &[a, b]);
}
TypeKind::Double => {
let fmod = self.context.get_builtin_function("fmod");
return self.context.new_call(self.location, fmod, &[a, b]);
}
TypeKind::FP128 => {
let fmodl = self.context.get_builtin_function("fmodl");
return self.context.new_call(self.location, fmodl, &[a, b]);
}
_ => (),
}

if let Some(vector_type) = a_type_unqualified.dyncast_vector() {
assert_eq!(a_type_unqualified, b.get_type().unqualified());

@ -903,11 +936,7 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
// TODO(antoyo): It might be better to return a LValue, but fixing the rustc API is non-trivial.
self.stack_var_count.set(self.stack_var_count.get() + 1);
self.current_func()
.new_local(
.new_local(self.location, ty, &format!("stack_var_{}", self.stack_var_count.get()))
self.location,
ty,
&format!("stack_var_{}", self.stack_var_count.get()),
)
.get_address(self.location)
}

@ -993,7 +1022,7 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
}
}

let val = if let Some(_) = place.val.llextra {
let val = if place.val.llextra.is_some() {
// FIXME: Merge with the `else` below?
OperandValue::Ref(place.val)
} else if place.layout.is_gcc_immediate() {

@ -1125,7 +1154,7 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
// the following cast is required to avoid this error:
// gcc_jit_context_new_call: mismatching types for argument 2 of function "__atomic_store_4": assignment to param arg1 (type: int) from loadedValue3577 (type: unsigned int __attribute__((aligned(4))))
let int_type = atomic_store.get_param(1).to_rvalue().get_type();
let value = self.context.new_cast(self.location, value, int_type);
let value = self.context.new_bitcast(self.location, value, int_type);
self.llbb().add_eval(
self.location,
self.context.new_call(self.location, atomic_store, &[ptr, value, ordering]),

@ -1172,7 +1201,7 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
// NOTE: due to opaque pointers now being used, we need to cast here.
let ptr = self.context.new_cast(self.location, ptr, typ.make_pointer());
// NOTE: array indexing is always considered in bounds in GCC (TODO(antoyo): to be verified).
let mut indices = indices.into_iter();
let mut indices = indices.iter();
let index = indices.next().expect("first index in inbounds_gep");
let mut result = self.context.new_array_access(self.location, ptr, *index);
for index in indices {

@ -1589,7 +1618,7 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
src: RValue<'gcc>,
order: AtomicOrdering,
) -> RValue<'gcc> {
let size = src.get_type().get_size();
let size = get_maybe_pointer_size(src);
let name = match op {
AtomicRmwBinOp::AtomicXchg => format!("__atomic_exchange_{}", size),
AtomicRmwBinOp::AtomicAdd => format!("__atomic_fetch_add_{}", size),

@ -1620,7 +1649,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
let dst = self.context.new_cast(self.location, dst, volatile_void_ptr_type);
// FIXME(antoyo): not sure why, but we have the wrong type here.
let new_src_type = atomic_function.get_param(1).to_rvalue().get_type();
let src = self.context.new_cast(self.location, src, new_src_type);
let src = self.context.new_bitcast(self.location, src, new_src_type);
let res = self.context.new_call(self.location, atomic_function, &[dst, src, order]);
self.context.new_cast(self.location, res, src.get_type())
}

@ -1661,7 +1690,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
_instance: Option<Instance<'tcx>>,
) -> RValue<'gcc> {
// FIXME(antoyo): remove when having a proper API.
let gcc_func = unsafe { std::mem::transmute(func) };
let gcc_func = unsafe { std::mem::transmute::<RValue<'gcc>, Function<'gcc>>(func) };
let call = if self.functions.borrow().values().any(|value| *value == gcc_func) {
self.function_call(func, args, funclet)
} else {

@ -1676,11 +1705,6 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {

fn zext(&mut self, value: RValue<'gcc>, dest_typ: Type<'gcc>) -> RValue<'gcc> {
// FIXME(antoyo): this does not zero-extend.
if value.get_type().is_bool() && dest_typ.is_i8(&self.cx) {
// FIXME(antoyo): hack because base::from_immediate converts i1 to i8.
// Fix the code in codegen_ssa::base::from_immediate.
return value;
}
self.gcc_int_cast(value, dest_typ)
}

@ -2049,7 +2073,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
self.context.new_rvalue_from_vector(self.location, mask_type, &vector_elements);
let shifted = self.context.new_rvalue_vector_perm(self.location, res, res, mask);
shift *= 2;
res = op(res, shifted, &self.context);
res = op(res, shifted, self.context);
}
self.context
.new_vector_access(self.location, res, self.context.new_rvalue_zero(self.int_type))

@ -2065,7 +2089,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
}

pub fn vector_reduce_op(&mut self, src: RValue<'gcc>, op: BinaryOp) -> RValue<'gcc> {
let loc = self.location.clone();
let loc = self.location;
self.vector_reduce(src, |a, b, context| context.new_binary_op(loc, op, a.get_type(), a, b))
}

@ -2082,7 +2106,6 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
let vector_type = src.get_type().unqualified().dyncast_vector().expect("vector type");
let element_count = vector_type.get_num_units();
(0..element_count)
.into_iter()
.map(|i| {
self.context
.new_vector_access(

@ -2113,7 +2136,6 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
let vector_type = src.get_type().unqualified().dyncast_vector().expect("vector type");
let element_count = vector_type.get_num_units();
(0..element_count)
.into_iter()
.map(|i| {
self.context
.new_vector_access(

@ -2133,7 +2155,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {

// Inspired by Hacker's Delight min implementation.
pub fn vector_reduce_min(&mut self, src: RValue<'gcc>) -> RValue<'gcc> {
let loc = self.location.clone();
let loc = self.location;
self.vector_reduce(src, |a, b, context| {
let differences_or_zeros = difference_or_zero(loc, a, b, context);
context.new_binary_op(loc, BinaryOp::Plus, b.get_type(), b, differences_or_zeros)

@ -2142,7 +2164,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {

// Inspired by Hacker's Delight max implementation.
pub fn vector_reduce_max(&mut self, src: RValue<'gcc>) -> RValue<'gcc> {
let loc = self.location.clone();
let loc = self.location;
self.vector_reduce(src, |a, b, context| {
let differences_or_zeros = difference_or_zero(loc, a, b, context);
context.new_binary_op(loc, BinaryOp::Minus, a.get_type(), a, differences_or_zeros)

@ -2337,7 +2359,13 @@ impl<'tcx> HasParamEnv<'tcx> for Builder<'_, '_, 'tcx> {

impl<'tcx> HasTargetSpec for Builder<'_, '_, 'tcx> {
fn target_spec(&self) -> &Target {
&self.cx.target_spec()
self.cx.target_spec()
}
}

impl<'tcx> HasWasmCAbiOpt for Builder<'_, '_, 'tcx> {
fn wasm_c_abi_opt(&self) -> WasmCAbi {
self.cx.wasm_c_abi_opt()
}
}

@ -2422,3 +2450,19 @@ impl ToGccOrdering for AtomicOrdering {
ordering as i32
}
}

// Needed because gcc 12 `get_size()` doesn't work on pointers.
#[cfg(feature = "master")]
fn get_maybe_pointer_size(value: RValue<'_>) -> u32 {
value.get_type().get_size()
}

#[cfg(not(feature = "master"))]
fn get_maybe_pointer_size(value: RValue<'_>) -> u32 {
let type_ = value.get_type();
if type_.get_pointee().is_some() {
std::mem::size_of::<*const ()>() as _
} else {
type_.get_size()
}
}

@ -28,7 +28,7 @@ pub fn get_fn<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, instance: Instance<'tcx>)

let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty());

let func = if let Some(_func) = cx.get_declared_value(&sym) {
let func = if let Some(_func) = cx.get_declared_value(sym) {
// FIXME(antoyo): we never reach this because get_declared_value only returns global variables
// and here we try to get a function.
unreachable!();

@ -68,7 +68,7 @@ pub fn get_fn<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, instance: Instance<'tcx>)
}*/
} else {
cx.linkage.set(FunctionType::Extern);
let func = cx.declare_fn(&sym, &fn_abi);
let func = cx.declare_fn(sym, fn_abi);

attributes::from_fn_attrs(cx, func, instance);


@ -21,7 +21,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {

fn global_string(&self, string: &str) -> LValue<'gcc> {
// TODO(antoyo): handle non-null-terminated strings.
let string = self.context.new_string_literal(&*string);
let string = self.context.new_string_literal(string);
let sym = self.generate_local_symbol_name("str");
let global = self.declare_private_global(&sym, self.val_ty(string));
global.global_set_initializer_rvalue(string);

@ -187,7 +187,8 @@ impl<'gcc, 'tcx> ConstMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
return self
.context
.new_rvalue_from_double(ty, f32::from_bits(data as u32) as f64);
} else if ty == self.double_type {
}
if ty == self.double_type {
return self.context.new_rvalue_from_double(ty, f64::from_bits(data as u64));
}

@ -297,7 +298,7 @@ impl<'gcc, 'tcx> SignType<'gcc, 'tcx> for Type<'gcc> {
} else if self.is_ulonglong(cx) {
cx.longlong_type
} else {
self.clone()
*self
}
}

@ -323,7 +324,7 @@ impl<'gcc, 'tcx> SignType<'gcc, 'tcx> for Type<'gcc> {
} else if self.is_longlong(cx) {
cx.ulonglong_type
} else {
self.clone()
*self
}
}
}

@ -436,7 +437,7 @@ impl<'gcc, 'tcx> TypeReflection<'gcc, 'tcx> for Type<'gcc> {
}

fn is_vector(&self) -> bool {
let mut typ = self.clone();
let mut typ = *self;
loop {
if typ.dyncast_vector().is_some() {
return true;
|
|
|
@ -1,15 +1,16 @@
|
||||||
#[cfg(feature = "master")]
|
#[cfg(feature = "master")]
|
||||||
use gccjit::{FnAttribute, VarAttribute, Visibility};
|
use gccjit::{FnAttribute, VarAttribute, Visibility};
|
||||||
use gccjit::{Function, GlobalKind, LValue, RValue, ToRValue};
|
use gccjit::{Function, GlobalKind, LValue, RValue, ToRValue, Type};
|
||||||
use rustc_codegen_ssa::traits::{BaseTypeMethods, ConstMethods, DerivedTypeMethods, StaticMethods};
|
use rustc_codegen_ssa::traits::{BaseTypeMethods, ConstMethods, StaticMethods};
|
||||||
|
use rustc_hir::def::DefKind;
|
||||||
|
use rustc_middle::bug;
|
||||||
use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
|
use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
|
||||||
use rustc_middle::mir::interpret::{
|
use rustc_middle::mir::interpret::{
|
||||||
self, read_target_uint, ConstAllocation, ErrorHandled, Scalar as InterpScalar,
|
self, read_target_uint, ConstAllocation, ErrorHandled, Scalar as InterpScalar,
|
||||||
};
|
};
|
||||||
use rustc_middle::mir::mono::MonoItem;
|
|
||||||
use rustc_middle::span_bug;
|
use rustc_middle::span_bug;
|
||||||
use rustc_middle::ty::layout::LayoutOf;
|
use rustc_middle::ty::layout::LayoutOf;
|
||||||
use rustc_middle::ty::{self, Instance, Ty};
|
use rustc_middle::ty::{self, Instance};
|
||||||
use rustc_span::def_id::DefId;
|
use rustc_span::def_id::DefId;
|
||||||
use rustc_target::abi::{self, Align, HasDataLayout, Primitive, Size, WrappingRange};
|
use rustc_target::abi::{self, Align, HasDataLayout, Primitive, Size, WrappingRange};
|
||||||
|
|
||||||
|
@ -63,16 +64,15 @@ impl<'gcc, 'tcx> StaticMethods for CodegenCx<'gcc, 'tcx> {
|
||||||
global_value
|
global_value
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg_attr(not(feature = "master"), allow(unused_mut))]
|
||||||
fn codegen_static(&self, def_id: DefId) {
|
fn codegen_static(&self, def_id: DefId) {
|
||||||
let attrs = self.tcx.codegen_fn_attrs(def_id);
|
let attrs = self.tcx.codegen_fn_attrs(def_id);
|
||||||
|
|
||||||
let value = match codegen_static_initializer(&self, def_id) {
|
let Ok((value, alloc)) = codegen_static_initializer(self, def_id) else {
|
||||||
Ok((value, _)) => value,
|
|
||||||
// Error has already been reported
|
// Error has already been reported
|
||||||
Err(_) => return,
|
return;
|
||||||
};
|
};
|
||||||
|
let alloc = alloc.inner();
|
||||||
let global = self.get_static(def_id);
|
|
||||||
|
|
||||||
// boolean SSA values are i1, but they have to be stored in i8 slots,
|
// boolean SSA values are i1, but they have to be stored in i8 slots,
|
||||||
// otherwise some LLVM optimization passes don't work as expected
|
// otherwise some LLVM optimization passes don't work as expected
|
||||||
|
@ -81,23 +81,25 @@ impl<'gcc, 'tcx> StaticMethods for CodegenCx<'gcc, 'tcx> {
|
||||||
unimplemented!();
|
unimplemented!();
|
||||||
};
|
};
|
||||||
|
|
||||||
let instance = Instance::mono(self.tcx, def_id);
|
let is_thread_local = attrs.flags.contains(CodegenFnAttrFlags::THREAD_LOCAL);
|
||||||
let ty = instance.ty(self.tcx, ty::ParamEnv::reveal_all());
|
let global = self.get_static_inner(def_id, val_llty);
|
||||||
let gcc_type = self.layout_of(ty).gcc_type(self);
|
|
||||||
|
|
||||||
set_global_alignment(self, global, self.align_of(ty));
|
#[cfg(feature = "master")]
|
||||||
|
if global.to_rvalue().get_type() != val_llty {
|
||||||
|
global.to_rvalue().set_type(val_llty);
|
||||||
|
}
|
||||||
|
set_global_alignment(self, global, alloc.align);
|
||||||
|
|
||||||
let value = self.bitcast_if_needed(value, gcc_type);
|
|
||||||
global.global_set_initializer_rvalue(value);
|
global.global_set_initializer_rvalue(value);
|
||||||
|
|
||||||
// As an optimization, all shared statics which do not have interior
|
// As an optimization, all shared statics which do not have interior
|
||||||
// mutability are placed into read-only memory.
|
// mutability are placed into read-only memory.
|
||||||
if !self.tcx.static_mutability(def_id).unwrap().is_mut() && self.type_is_freeze(ty) {
|
if alloc.mutability.is_not() {
|
||||||
#[cfg(feature = "master")]
|
#[cfg(feature = "master")]
|
||||||
global.global_set_readonly();
|
global.global_set_readonly();
|
||||||
}
|
}
|
||||||
|
|
||||||
if attrs.flags.contains(CodegenFnAttrFlags::THREAD_LOCAL) {
|
if is_thread_local {
|
||||||
// Do not allow LLVM to change the alignment of a TLS on macOS.
|
// Do not allow LLVM to change the alignment of a TLS on macOS.
|
||||||
//
|
//
|
||||||
// By default a global's alignment can be freely increased.
|
// By default a global's alignment can be freely increased.
|
||||||
@ -205,35 +207,49 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {

    pub fn get_static(&self, def_id: DefId) -> LValue<'gcc> {
        let instance = Instance::mono(self.tcx, def_id);
-       let fn_attrs = self.tcx.codegen_fn_attrs(def_id);
+       let DefKind::Static { nested, .. } = self.tcx.def_kind(def_id) else { bug!() };
+       // Nested statics do not have a type, so pick a random type and let `define_static` figure out
+       // the gcc type from the actual evaluated initializer.
+       let gcc_type = if nested {
+           self.type_i8()
+       } else {
+           let ty = instance.ty(self.tcx, ty::ParamEnv::reveal_all());
+           self.layout_of(ty).gcc_type(self)
+       };
+
+       self.get_static_inner(def_id, gcc_type)
+   }
+
+   pub(crate) fn get_static_inner(&self, def_id: DefId, gcc_type: Type<'gcc>) -> LValue<'gcc> {
+       let instance = Instance::mono(self.tcx, def_id);
        if let Some(&global) = self.instances.borrow().get(&instance) {
+           trace!("used cached value");
            return global;
        }

-       let defined_in_current_codegen_unit =
-           self.codegen_unit.items().contains_key(&MonoItem::Static(def_id));
-       assert!(
-           !defined_in_current_codegen_unit,
-           "consts::get_static() should always hit the cache for \
-            statics defined in the same CGU, but did not for `{:?}`",
-           def_id
-       );
+       // FIXME: Once we stop removing globals in `codegen_static`, we can uncomment this code.
+       // let defined_in_current_codegen_unit =
+       //     self.codegen_unit.items().contains_key(&MonoItem::Static(def_id));
+       // assert!(
+       //     !defined_in_current_codegen_unit,
+       //     "consts::get_static() should always hit the cache for \
+       //      statics defined in the same CGU, but did not for `{:?}`",
+       //     def_id
+       // );

-       let ty = instance.ty(self.tcx, ty::ParamEnv::reveal_all());
        let sym = self.tcx.symbol_name(instance).name;
+       let fn_attrs = self.tcx.codegen_fn_attrs(def_id);

        let global = if def_id.is_local() && !self.tcx.is_foreign_item(def_id) {
-           let llty = self.layout_of(ty).gcc_type(self);
            if let Some(global) = self.get_declared_value(sym) {
-               if self.val_ty(global) != self.type_ptr_to(llty) {
+               if self.val_ty(global) != self.type_ptr_to(gcc_type) {
                    span_bug!(self.tcx.def_span(def_id), "Conflicting types for static");
                }
            }

            let is_tls = fn_attrs.flags.contains(CodegenFnAttrFlags::THREAD_LOCAL);
            let global = self.declare_global(
-               &sym,
-               llty,
+               sym,
+               gcc_type,
                GlobalKind::Exported,
                is_tls,
                fn_attrs.link_section,
@ -246,7 +262,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {

            global
        } else {
-           check_and_apply_linkage(&self, &fn_attrs, ty, sym)
+           check_and_apply_linkage(self, fn_attrs, gcc_type, sym)
        };

        if !def_id.is_local() {
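The hunk above splits the lookup into a thin public getter, which only decides which GCC type to use (a placeholder `i8` for nested statics, the layout type otherwise), and an inner function that consults the `instances` cache before declaring anything. A rough stand-alone sketch of that shape, with hypothetical names and a plain `HashMap` standing in for the real cache, might look like this:

```rust
use std::cell::RefCell;
use std::collections::HashMap;

// Hypothetical stand-in for the declared-global handle and its type.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Ty { I8, Actual }

#[derive(Default)]
struct Cx {
    // Mirrors the `instances` cache: later calls must hit this map.
    instances: RefCell<HashMap<u32, (Ty, &'static str)>>,
}

impl Cx {
    // Public entry point: pick a placeholder type for "nested" statics,
    // otherwise compute the real type, then delegate to the inner function.
    fn get_static(&self, id: u32, nested: bool) -> (Ty, &'static str) {
        let ty = if nested { Ty::I8 } else { Ty::Actual };
        self.get_static_inner(id, ty)
    }

    fn get_static_inner(&self, id: u32, ty: Ty) -> (Ty, &'static str) {
        if let Some(&global) = self.instances.borrow().get(&id) {
            return global; // cached value wins over the requested type
        }
        let global = (ty, "declared");
        self.instances.borrow_mut().insert(id, global);
        global
    }
}

fn main() {
    let cx = Cx::default();
    assert_eq!(cx.get_static(1, true), (Ty::I8, "declared"));
    // A second lookup hits the cache and keeps the originally declared type.
    assert_eq!(cx.get_static(1, false), (Ty::I8, "declared"));
}
```

As the `main` shows, once an entry is cached the type passed to the inner function no longer matters, which is why the real code can afford a "random" placeholder type for nested statics.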
@ -360,18 +376,14 @@ fn codegen_static_initializer<'gcc, 'tcx>(
fn check_and_apply_linkage<'gcc, 'tcx>(
    cx: &CodegenCx<'gcc, 'tcx>,
    attrs: &CodegenFnAttrs,
-   ty: Ty<'tcx>,
+   gcc_type: Type<'gcc>,
    sym: &str,
) -> LValue<'gcc> {
    let is_tls = attrs.flags.contains(CodegenFnAttrFlags::THREAD_LOCAL);
-   let gcc_type = cx.layout_of(ty).gcc_type(cx);
    if let Some(linkage) = attrs.import_linkage {
        // Declare a symbol `foo` with the desired linkage.
-       let global1 = cx.declare_global_with_linkage(
-           &sym,
-           cx.type_i8(),
-           base::global_linkage_to_gcc(linkage),
-       );
+       let global1 =
+           cx.declare_global_with_linkage(sym, cx.type_i8(), base::global_linkage_to_gcc(linkage));

        // Declare an internal global `extern_with_linkage_foo` which
        // is initialized with the address of `foo`. If `foo` is
@ -380,7 +392,7 @@ fn check_and_apply_linkage<'gcc, 'tcx>(
        // `extern_with_linkage_foo` will instead be initialized to
        // zero.
        let mut real_name = "_rust_extern_with_linkage_".to_string();
-       real_name.push_str(&sym);
+       real_name.push_str(sym);
        let global2 = cx.define_global(&real_name, gcc_type, is_tls, attrs.link_section);
        // TODO(antoyo): set linkage.
        let value = cx.const_ptrcast(global1.get_address(None), gcc_type);
@ -397,6 +409,6 @@ fn check_and_apply_linkage<'gcc, 'tcx>(
        // don't do this then linker errors can be generated where the linker
        // complains that one object files has a thread local version of the
        // symbol and another one doesn't.
-       cx.declare_global(&sym, gcc_type, GlobalKind::Imported, is_tls, attrs.link_section)
+       cx.declare_global(sym, gcc_type, GlobalKind::Imported, is_tls, attrs.link_section)
    }
}
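The `import_linkage` branch above keeps the existing two-symbol scheme: declare the real symbol with the requested linkage, then define an internal global that only stores its address. A loose, plain-Rust analogue of that indirection (not libgccjit code; names are illustrative) is:

```rust
// Illustrative sketch: an exported symbol plus an internal global that
// merely holds its address, analogous to `_rust_extern_with_linkage_foo`.
#[no_mangle]
pub static FOO: u32 = 42; // stands in for the symbol `foo`

static EXTERN_WITH_LINKAGE_FOO: &u32 = &FOO;

fn main() {
    assert!(std::ptr::eq(EXTERN_WITH_LINKAGE_FOO, &FOO));
    println!("indirect read: {}", *EXTERN_WITH_LINKAGE_FOO);
}
```

If the real symbol cannot be resolved, only the inner pointer-sized global needs a fallback value, which is the point of the indirection described in the surrounding comments.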
|
|
|
@ -68,6 +68,10 @@ pub struct CodegenCx<'gcc, 'tcx> {
|
||||||
pub sizet_type: Type<'gcc>,
|
pub sizet_type: Type<'gcc>,
|
||||||
|
|
||||||
pub supports_128bit_integers: bool,
|
pub supports_128bit_integers: bool,
|
||||||
|
pub supports_f16_type: bool,
|
||||||
|
pub supports_f32_type: bool,
|
||||||
|
pub supports_f64_type: bool,
|
||||||
|
pub supports_f128_type: bool,
|
||||||
|
|
||||||
pub float_type: Type<'gcc>,
|
pub float_type: Type<'gcc>,
|
||||||
pub double_type: Type<'gcc>,
|
pub double_type: Type<'gcc>,
|
||||||
|
@ -110,7 +114,7 @@ pub struct CodegenCx<'gcc, 'tcx> {
|
||||||
local_gen_sym_counter: Cell<usize>,
|
local_gen_sym_counter: Cell<usize>,
|
||||||
|
|
||||||
eh_personality: Cell<Option<RValue<'gcc>>>,
|
eh_personality: Cell<Option<RValue<'gcc>>>,
|
||||||
#[cfg(feature="master")]
|
#[cfg(feature = "master")]
|
||||||
pub rust_try_fn: Cell<Option<(Type<'gcc>, Function<'gcc>)>>,
|
pub rust_try_fn: Cell<Option<(Type<'gcc>, Function<'gcc>)>>,
|
||||||
|
|
||||||
pub pointee_infos: RefCell<FxHashMap<(Ty<'tcx>, Size), Option<PointeeInfo>>>,
|
pub pointee_infos: RefCell<FxHashMap<(Ty<'tcx>, Size), Option<PointeeInfo>>>,
|
||||||
|
@ -122,16 +126,21 @@ pub struct CodegenCx<'gcc, 'tcx> {
|
||||||
/// FIXME(antoyo): fix the rustc API to avoid having this hack.
|
/// FIXME(antoyo): fix the rustc API to avoid having this hack.
|
||||||
pub structs_as_pointer: RefCell<FxHashSet<RValue<'gcc>>>,
|
pub structs_as_pointer: RefCell<FxHashSet<RValue<'gcc>>>,
|
||||||
|
|
||||||
#[cfg(feature="master")]
|
#[cfg(feature = "master")]
|
||||||
pub cleanup_blocks: RefCell<FxHashSet<Block<'gcc>>>,
|
pub cleanup_blocks: RefCell<FxHashSet<Block<'gcc>>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||||
|
#[allow(clippy::too_many_arguments)]
|
||||||
pub fn new(
|
pub fn new(
|
||||||
context: &'gcc Context<'gcc>,
|
context: &'gcc Context<'gcc>,
|
||||||
codegen_unit: &'tcx CodegenUnit<'tcx>,
|
codegen_unit: &'tcx CodegenUnit<'tcx>,
|
||||||
tcx: TyCtxt<'tcx>,
|
tcx: TyCtxt<'tcx>,
|
||||||
supports_128bit_integers: bool,
|
supports_128bit_integers: bool,
|
||||||
|
supports_f16_type: bool,
|
||||||
|
supports_f32_type: bool,
|
||||||
|
supports_f64_type: bool,
|
||||||
|
supports_f128_type: bool,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
let create_type = |ctype, rust_type| {
|
let create_type = |ctype, rust_type| {
|
||||||
let layout = tcx.layout_of(ParamEnv::reveal_all().and(rust_type)).unwrap();
|
let layout = tcx.layout_of(ParamEnv::reveal_all().and(rust_type)).unwrap();
|
||||||
|
@ -304,6 +313,10 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||||
sizet_type,
|
sizet_type,
|
||||||
|
|
||||||
supports_128bit_integers,
|
supports_128bit_integers,
|
||||||
|
supports_f16_type,
|
||||||
|
supports_f32_type,
|
||||||
|
supports_f64_type,
|
||||||
|
supports_f128_type,
|
||||||
|
|
||||||
float_type,
|
float_type,
|
||||||
double_type,
|
double_type,
|
||||||
|
@ -324,11 +337,11 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||||
struct_types: Default::default(),
|
struct_types: Default::default(),
|
||||||
local_gen_sym_counter: Cell::new(0),
|
local_gen_sym_counter: Cell::new(0),
|
||||||
eh_personality: Cell::new(None),
|
eh_personality: Cell::new(None),
|
||||||
#[cfg(feature="master")]
|
#[cfg(feature = "master")]
|
||||||
rust_try_fn: Cell::new(None),
|
rust_try_fn: Cell::new(None),
|
||||||
pointee_infos: Default::default(),
|
pointee_infos: Default::default(),
|
||||||
structs_as_pointer: Default::default(),
|
structs_as_pointer: Default::default(),
|
||||||
#[cfg(feature="master")]
|
#[cfg(feature = "master")]
|
||||||
cleanup_blocks: Default::default(),
|
cleanup_blocks: Default::default(),
|
||||||
};
|
};
|
||||||
// TODO(antoyo): instead of doing this, add SsizeT to libgccjit.
|
// TODO(antoyo): instead of doing this, add SsizeT to libgccjit.
|
||||||
|
@ -385,7 +398,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn sess(&self) -> &'tcx Session {
|
pub fn sess(&self) -> &'tcx Session {
|
||||||
&self.tcx.sess
|
self.tcx.sess
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn bitcast_if_needed(
|
pub fn bitcast_if_needed(
|
||||||
|
@ -432,7 +445,9 @@ impl<'gcc, 'tcx> MiscMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||||
let func_name = self.tcx.symbol_name(instance).name;
|
let func_name = self.tcx.symbol_name(instance).name;
|
||||||
|
|
||||||
let func = if self.intrinsics.borrow().contains_key(func_name) {
|
let func = if self.intrinsics.borrow().contains_key(func_name) {
|
||||||
self.intrinsics.borrow()[func_name].clone()
|
self.intrinsics.borrow()[func_name]
|
||||||
|
} else if let Some(variable) = self.get_declared_value(func_name) {
|
||||||
|
return variable;
|
||||||
} else {
|
} else {
|
||||||
get_fn(self, instance)
|
get_fn(self, instance)
|
||||||
};
|
};
|
||||||
|
@ -485,7 +500,7 @@ impl<'gcc, 'tcx> MiscMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||||
let symbol_name = tcx.symbol_name(instance).name;
|
let symbol_name = tcx.symbol_name(instance).name;
|
||||||
let fn_abi = self.fn_abi_of_instance(instance, ty::List::empty());
|
let fn_abi = self.fn_abi_of_instance(instance, ty::List::empty());
|
||||||
self.linkage.set(FunctionType::Extern);
|
self.linkage.set(FunctionType::Extern);
|
||||||
let func = self.declare_fn(symbol_name, &fn_abi);
|
let func = self.declare_fn(symbol_name, fn_abi);
|
||||||
let func: RValue<'gcc> = unsafe { std::mem::transmute(func) };
|
let func: RValue<'gcc> = unsafe { std::mem::transmute(func) };
|
||||||
func
|
func
|
||||||
}
|
}
|
||||||
|
@ -496,7 +511,7 @@ impl<'gcc, 'tcx> MiscMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||||
"rust_eh_personality"
|
"rust_eh_personality"
|
||||||
};
|
};
|
||||||
let func = self.declare_func(name, self.type_i32(), &[], true);
|
let func = self.declare_func(name, self.type_i32(), &[], true);
|
||||||
unsafe { std::mem::transmute(func) }
|
unsafe { std::mem::transmute::<Function<'gcc>, RValue<'gcc>>(func) }
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
// TODO(antoyo): apply target cpu attributes.
|
// TODO(antoyo): apply target cpu attributes.
|
||||||
|
@ -505,7 +520,7 @@ impl<'gcc, 'tcx> MiscMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn sess(&self) -> &Session {
|
fn sess(&self) -> &Session {
|
||||||
&self.tcx.sess
|
self.tcx.sess
|
||||||
}
|
}
|
||||||
|
|
||||||
fn codegen_unit(&self) -> &'tcx CodegenUnit<'tcx> {
|
fn codegen_unit(&self) -> &'tcx CodegenUnit<'tcx> {
|
||||||
|
@ -522,7 +537,7 @@ impl<'gcc, 'tcx> MiscMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||||
|
|
||||||
fn declare_c_main(&self, fn_type: Self::Type) -> Option<Self::Function> {
|
fn declare_c_main(&self, fn_type: Self::Type) -> Option<Self::Function> {
|
||||||
let entry_name = self.sess().target.entry_name.as_ref();
|
let entry_name = self.sess().target.entry_name.as_ref();
|
||||||
if self.get_declared_value(entry_name).is_none() {
|
if !self.functions.borrow().contains_key(entry_name) {
|
||||||
Some(self.declare_entry_fn(entry_name, fn_type, ()))
|
Some(self.declare_entry_fn(entry_name, fn_type, ()))
|
||||||
} else {
|
} else {
|
||||||
// If the symbol already exists, it is an error: for example, the user wrote
|
// If the symbol already exists, it is an error: for example, the user wrote
|
||||||
|
@ -614,7 +629,7 @@ impl<'b, 'tcx> CodegenCx<'b, 'tcx> {
|
||||||
// user defined names
|
// user defined names
|
||||||
let mut name = String::with_capacity(prefix.len() + 6);
|
let mut name = String::with_capacity(prefix.len() + 6);
|
||||||
name.push_str(prefix);
|
name.push_str(prefix);
|
||||||
name.push_str(".");
|
name.push('.');
|
||||||
name.push_str(&(idx as u64).to_base(ALPHANUMERIC_ONLY));
|
name.push_str(&(idx as u64).to_base(ALPHANUMERIC_ONLY));
|
||||||
name
|
name
|
||||||
}
|
}
|
||||||
|
|
|
@ -90,7 +90,7 @@ fn compute_mir_scopes<'gcc, 'tcx>(
|
||||||
/// FIXME(tempdragon/?): Add Scope Support Here.
|
/// FIXME(tempdragon/?): Add Scope Support Here.
|
||||||
fn make_mir_scope<'gcc, 'tcx>(
|
fn make_mir_scope<'gcc, 'tcx>(
|
||||||
cx: &CodegenCx<'gcc, 'tcx>,
|
cx: &CodegenCx<'gcc, 'tcx>,
|
||||||
instance: Instance<'tcx>,
|
_instance: Instance<'tcx>,
|
||||||
mir: &Body<'tcx>,
|
mir: &Body<'tcx>,
|
||||||
variables: &Option<BitSet<SourceScope>>,
|
variables: &Option<BitSet<SourceScope>>,
|
||||||
debug_context: &mut FunctionDebugContext<'tcx, (), Location<'gcc>>,
|
debug_context: &mut FunctionDebugContext<'tcx, (), Location<'gcc>>,
|
||||||
|
@ -103,7 +103,7 @@ fn make_mir_scope<'gcc, 'tcx>(
|
||||||
|
|
||||||
let scope_data = &mir.source_scopes[scope];
|
let scope_data = &mir.source_scopes[scope];
|
||||||
let parent_scope = if let Some(parent) = scope_data.parent_scope {
|
let parent_scope = if let Some(parent) = scope_data.parent_scope {
|
||||||
make_mir_scope(cx, instance, mir, variables, debug_context, instantiated, parent);
|
make_mir_scope(cx, _instance, mir, variables, debug_context, instantiated, parent);
|
||||||
debug_context.scopes[parent]
|
debug_context.scopes[parent]
|
||||||
} else {
|
} else {
|
||||||
// The root is the function itself.
|
// The root is the function itself.
|
||||||
|
@ -117,7 +117,7 @@ fn make_mir_scope<'gcc, 'tcx>(
|
||||||
return;
|
return;
|
||||||
};
|
};
|
||||||
|
|
||||||
if let Some(vars) = variables {
|
if let Some(ref vars) = *variables {
|
||||||
if !vars.contains(scope) && scope_data.inlined.is_none() {
|
if !vars.contains(scope) && scope_data.inlined.is_none() {
|
||||||
// Do not create a DIScope if there are no variables defined in this
|
// Do not create a DIScope if there are no variables defined in this
|
||||||
// MIR `SourceScope`, and it's not `inlined`, to avoid debuginfo bloat.
|
// MIR `SourceScope`, and it's not `inlined`, to avoid debuginfo bloat.
|
||||||
@ -135,8 +135,14 @@ fn make_mir_scope<'gcc, 'tcx>(
    let inlined_at = scope_data.inlined.map(|(_, callsite_span)| {
        // FIXME(eddyb) this doesn't account for the macro-related
        // `Span` fixups that `rustc_codegen_ssa::mir::debuginfo` does.
-       let callsite_scope = parent_scope.adjust_dbg_scope_for_span(cx, callsite_span);
-       cx.dbg_loc(callsite_scope, parent_scope.inlined_at, callsite_span)
+       // TODO(tempdragon): Add scope support and then revert to the cg_llvm version of this closure.
+       // NOTE: the scope argument is passed as `()` here; changed to comply with clippy.

+       /* let callsite_scope = */
+       parent_scope.adjust_dbg_scope_for_span(cx, callsite_span);
+       cx.dbg_loc(/* callsite_scope */ (), parent_scope.inlined_at, callsite_span)
    });
    let p_inlined_at = parent_scope.inlined_at;
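The closure above keeps the `adjust_dbg_scope_for_span` call purely for its side effects and passes a unit placeholder where a real scope will eventually go. A tiny sketch of that pattern (plain Rust, hypothetical helper names) is:

```rust
// Sketch: inside a `map` closure, a call is kept for its side effect while
// its result is deliberately unused, and `()` is passed as a placeholder.
fn record_callsite(name: &str) -> usize {
    println!("recorded {name}");
    name.len() // result currently unused by the caller below
}

fn build_location(_scope: (), line: u32) -> (u32,) {
    (line,)
}

fn main() {
    let loc = Some(7u32).map(|line| {
        record_callsite("callsite"); // side effect only
        build_location(/* scope */ (), line)
    });
    assert_eq!(loc, Some((7,)));
}
```

Once scope support lands, the placeholder `()` can be replaced by the value the commented-out binding would have produced.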
let p_inlined_at = parent_scope.inlined_at;
|
let p_inlined_at = parent_scope.inlined_at;
|
||||||
// TODO(tempdragon): dbg_scope: Add support for scope extension here.
|
// TODO(tempdragon): dbg_scope: Add support for scope extension here.
|
||||||
|
@ -224,7 +230,7 @@ impl<'gcc, 'tcx> DebugInfoMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||||
file_end_pos: BytePos(0),
|
file_end_pos: BytePos(0),
|
||||||
};
|
};
|
||||||
let mut fn_debug_context = FunctionDebugContext {
|
let mut fn_debug_context = FunctionDebugContext {
|
||||||
scopes: IndexVec::from_elem(empty_scope, &mir.source_scopes.as_slice()),
|
scopes: IndexVec::from_elem(empty_scope, mir.source_scopes.as_slice()),
|
||||||
inlined_function_scopes: Default::default(),
|
inlined_function_scopes: Default::default(),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -273,16 +279,19 @@ impl<'gcc, 'tcx> DebugInfoMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||||
) -> Self::DILocation {
|
) -> Self::DILocation {
|
||||||
let pos = span.lo();
|
let pos = span.lo();
|
||||||
let DebugLoc { file, line, col } = self.lookup_debug_loc(pos);
|
let DebugLoc { file, line, col } = self.lookup_debug_loc(pos);
|
||||||
let loc = match &file.name {
|
let loc = match file.name {
|
||||||
rustc_span::FileName::Real(name) => match name {
|
rustc_span::FileName::Real(ref name) => match *name {
|
||||||
rustc_span::RealFileName::LocalPath(name) => {
|
rustc_span::RealFileName::LocalPath(ref name) => {
|
||||||
if let Some(name) = name.to_str() {
|
if let Some(name) = name.to_str() {
|
||||||
self.context.new_location(name, line as i32, col as i32)
|
self.context.new_location(name, line as i32, col as i32)
|
||||||
} else {
|
} else {
|
||||||
Location::null()
|
Location::null()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
rustc_span::RealFileName::Remapped { local_path, virtual_name: _ } => {
|
rustc_span::RealFileName::Remapped {
|
||||||
|
ref local_path,
|
||||||
|
virtual_name: ref _unused,
|
||||||
|
} => {
|
||||||
if let Some(name) = local_path.as_ref() {
|
if let Some(name) = local_path.as_ref() {
|
||||||
if let Some(name) = name.to_str() {
|
if let Some(name) = name.to_str() {
|
||||||
self.context.new_location(name, line as i32, col as i32)
|
self.context.new_location(name, line as i32, col as i32)
|
||||||
|
|
|
@ -35,7 +35,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||||
|
|
||||||
pub fn declare_unnamed_global(&self, ty: Type<'gcc>) -> LValue<'gcc> {
|
pub fn declare_unnamed_global(&self, ty: Type<'gcc>) -> LValue<'gcc> {
|
||||||
let name = self.generate_local_symbol_name("global");
|
let name = self.generate_local_symbol_name("global");
|
||||||
self.context.new_global(None, GlobalKind::Internal, ty, &name)
|
self.context.new_global(None, GlobalKind::Internal, ty, name)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn declare_global_with_linkage(
|
pub fn declare_global_with_linkage(
|
||||||
|
@ -176,16 +176,14 @@ fn declare_raw_fn<'gcc>(
|
||||||
cx.functions.borrow()[name]
|
cx.functions.borrow()[name]
|
||||||
} else {
|
} else {
|
||||||
let params: Vec<_> = param_types
|
let params: Vec<_> = param_types
|
||||||
.into_iter()
|
.iter()
|
||||||
.enumerate()
|
.enumerate()
|
||||||
.map(|(index, param)| {
|
.map(|(index, param)| cx.context.new_parameter(None, *param, format!("param{}", index))) // TODO(antoyo): set name.
|
||||||
cx.context.new_parameter(None, *param, &format!("param{}", index))
|
|
||||||
}) // TODO(antoyo): set name.
|
|
||||||
.collect();
|
.collect();
|
||||||
#[cfg(not(feature = "master"))]
|
#[cfg(not(feature = "master"))]
|
||||||
let name = mangle_name(name);
|
let name = &mangle_name(name);
|
||||||
let func =
|
let func =
|
||||||
cx.context.new_function(None, cx.linkage.get(), return_type, ¶ms, &name, variadic);
|
cx.context.new_function(None, cx.linkage.get(), return_type, ¶ms, name, variadic);
|
||||||
cx.functions.borrow_mut().insert(name.to_string(), func);
|
cx.functions.borrow_mut().insert(name.to_string(), func);
|
||||||
|
|
||||||
#[cfg(feature = "master")]
|
#[cfg(feature = "master")]
|
||||||
|
@ -200,10 +198,10 @@ fn declare_raw_fn<'gcc>(
|
||||||
// create a wrapper function that calls rust_eh_personality.
|
// create a wrapper function that calls rust_eh_personality.
|
||||||
|
|
||||||
let params: Vec<_> = param_types
|
let params: Vec<_> = param_types
|
||||||
.into_iter()
|
.iter()
|
||||||
.enumerate()
|
.enumerate()
|
||||||
.map(|(index, param)| {
|
.map(|(index, param)| {
|
||||||
cx.context.new_parameter(None, *param, &format!("param{}", index))
|
cx.context.new_parameter(None, *param, format!("param{}", index))
|
||||||
}) // TODO(antoyo): set name.
|
}) // TODO(antoyo): set name.
|
||||||
.collect();
|
.collect();
|
||||||
let gcc_func = cx.context.new_function(
|
let gcc_func = cx.context.new_function(
|
||||||
|
|
|
@ -2,8 +2,6 @@
|
||||||
//! This module exists because some integer types are not supported on some gcc platforms, e.g.
|
//! This module exists because some integer types are not supported on some gcc platforms, e.g.
|
||||||
//! 128-bit integers on 32-bit platforms and thus require to be handled manually.
|
//! 128-bit integers on 32-bit platforms and thus require to be handled manually.
|
||||||
|
|
||||||
use std::convert::TryFrom;
|
|
||||||
|
|
||||||
use gccjit::{BinaryOp, ComparisonOp, FunctionType, Location, RValue, ToRValue, Type, UnaryOp};
|
use gccjit::{BinaryOp, ComparisonOp, FunctionType, Location, RValue, ToRValue, Type, UnaryOp};
|
||||||
use rustc_codegen_ssa::common::{IntPredicate, TypeKind};
|
use rustc_codegen_ssa::common::{IntPredicate, TypeKind};
|
||||||
use rustc_codegen_ssa::traits::{BackendTypes, BaseTypeMethods, BuilderMethods, OverflowOp};
|
use rustc_codegen_ssa::traits::{BackendTypes, BaseTypeMethods, BuilderMethods, OverflowOp};
|
||||||
|
@ -40,7 +38,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||||
self.cx.context.new_unary_op(self.location, operation, typ, a)
|
self.cx.context.new_unary_op(self.location, operation, typ, a)
|
||||||
} else {
|
} else {
|
||||||
let element_type = typ.dyncast_array().expect("element type");
|
let element_type = typ.dyncast_array().expect("element type");
|
||||||
self.from_low_high_rvalues(
|
self.concat_low_high_rvalues(
|
||||||
typ,
|
typ,
|
||||||
self.cx.context.new_unary_op(
|
self.cx.context.new_unary_op(
|
||||||
self.location,
|
self.location,
|
||||||
@ -83,7 +81,19 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
            let b = self.context.new_cast(self.location, b, a_type);
            a >> b
        } else {
-           a >> b
+           let a_size = a_type.get_size();
+           let b_size = b_type.get_size();
+           match a_size.cmp(&b_size) {
+               std::cmp::Ordering::Less => {
+                   let a = self.context.new_cast(self.location, a, b_type);
+                   a >> b
+               }
+               std::cmp::Ordering::Equal => a >> b,
+               std::cmp::Ordering::Greater => {
+                   let b = self.context.new_cast(self.location, b, a_type);
+                   a >> b
+               }
+           }
        }
    } else if a_type.is_vector() && a_type.is_vector() {
        a >> b
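The new branch compares the operand widths and casts the narrower operand to the wider type before shifting, instead of shifting mismatched types. A small sketch of that rule, modelled with concrete integer types rather than libgccjit rvalues, is:

```rust
use std::cmp::Ordering;

// Sketch: when the value and the shift amount have different widths,
// widen the narrower operand before performing the shift.
fn shr_widening(a: i32, b: i64) -> i64 {
    let a_size = std::mem::size_of::<i32>();
    let b_size = std::mem::size_of::<i64>();
    match a_size.cmp(&b_size) {
        // value is narrower: widen the value to the shift amount's type
        Ordering::Less => (a as i64) >> b,
        Ordering::Equal => (a as i64) >> b,
        // shift amount is narrower: it would be cast to the value's type instead
        Ordering::Greater => unreachable!("i32 is not wider than i64"),
    }
}

fn main() {
    assert_eq!(shr_widening(-16, 2), -4); // arithmetic shift preserves the sign
}
```

Widening before the shift keeps the operation well-typed for gcc while preserving the arithmetic (sign-extending) behaviour of the original value.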
|
@ -114,7 +124,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||||
let shift_value = self.gcc_sub(b, sixty_four);
|
let shift_value = self.gcc_sub(b, sixty_four);
|
||||||
let high = self.high(a);
|
let high = self.high(a);
|
||||||
let sign = if a_type.is_signed(self) { high >> sixty_three } else { zero };
|
let sign = if a_type.is_signed(self) { high >> sixty_three } else { zero };
|
||||||
let array_value = self.from_low_high_rvalues(a_type, high >> shift_value, sign);
|
let array_value = self.concat_low_high_rvalues(a_type, high >> shift_value, sign);
|
||||||
then_block.add_assignment(self.location, result, array_value);
|
then_block.add_assignment(self.location, result, array_value);
|
||||||
then_block.end_with_jump(self.location, after_block);
|
then_block.end_with_jump(self.location, after_block);
|
||||||
|
|
||||||
|
@ -126,12 +136,15 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||||
|
|
||||||
let shift_value = self.gcc_sub(sixty_four, b);
|
let shift_value = self.gcc_sub(sixty_four, b);
|
||||||
// NOTE: cast low to its unsigned type in order to perform a logical right shift.
|
// NOTE: cast low to its unsigned type in order to perform a logical right shift.
|
||||||
let unsigned_type = native_int_type.to_unsigned(&self.cx);
|
let unsigned_type = native_int_type.to_unsigned(self.cx);
|
||||||
let casted_low = self.context.new_cast(self.location, self.low(a), unsigned_type);
|
let casted_low = self.context.new_cast(self.location, self.low(a), unsigned_type);
|
||||||
let shifted_low = casted_low >> self.context.new_cast(self.location, b, unsigned_type);
|
let shifted_low = casted_low >> self.context.new_cast(self.location, b, unsigned_type);
|
||||||
let shifted_low = self.context.new_cast(self.location, shifted_low, native_int_type);
|
let shifted_low = self.context.new_cast(self.location, shifted_low, native_int_type);
|
||||||
let array_value =
|
let array_value = self.concat_low_high_rvalues(
|
||||||
self.from_low_high_rvalues(a_type, (high << shift_value) | shifted_low, high >> b);
|
a_type,
|
||||||
|
(high << shift_value) | shifted_low,
|
||||||
|
high >> b,
|
||||||
|
);
|
||||||
actual_else_block.add_assignment(self.location, result, array_value);
|
actual_else_block.add_assignment(self.location, result, array_value);
|
||||||
actual_else_block.end_with_jump(self.location, after_block);
|
actual_else_block.end_with_jump(self.location, after_block);
|
||||||
|
|
||||||
|
@ -255,10 +268,10 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||||
) -> (<Self as BackendTypes>::Value, <Self as BackendTypes>::Value) {
|
) -> (<Self as BackendTypes>::Value, <Self as BackendTypes>::Value) {
|
||||||
use rustc_middle::ty::{Int, IntTy::*, Uint, UintTy::*};
|
use rustc_middle::ty::{Int, IntTy::*, Uint, UintTy::*};
|
||||||
|
|
||||||
let new_kind = match typ.kind() {
|
let new_kind = match *typ.kind() {
|
||||||
Int(t @ Isize) => Int(t.normalize(self.tcx.sess.target.pointer_width)),
|
Int(t @ Isize) => Int(t.normalize(self.tcx.sess.target.pointer_width)),
|
||||||
Uint(t @ Usize) => Uint(t.normalize(self.tcx.sess.target.pointer_width)),
|
Uint(t @ Usize) => Uint(t.normalize(self.tcx.sess.target.pointer_width)),
|
||||||
t @ (Uint(_) | Int(_)) => t.clone(),
|
t @ (Uint(_) | Int(_)) => t,
|
||||||
_ => panic!("tried to get overflow intrinsic for op applied to non-int type"),
|
_ => panic!("tried to get overflow intrinsic for op applied to non-int type"),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -344,7 +357,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
let intrinsic = self.context.get_builtin_function(&name);
|
let intrinsic = self.context.get_builtin_function(name);
|
||||||
let res = self
|
let res = self
|
||||||
.current_func()
|
.current_func()
|
||||||
// TODO(antoyo): is it correct to use rhs type instead of the parameter typ?
|
// TODO(antoyo): is it correct to use rhs type instead of the parameter typ?
|
||||||
|
@ -454,7 +467,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||||
let native_int_type = a_type.dyncast_array().expect("get element type");
|
let native_int_type = a_type.dyncast_array().expect("get element type");
|
||||||
// NOTE: cast low to its unsigned type in order to perform a comparison correctly (e.g.
|
// NOTE: cast low to its unsigned type in order to perform a comparison correctly (e.g.
|
||||||
// the sign is only on high).
|
// the sign is only on high).
|
||||||
let unsigned_type = native_int_type.to_unsigned(&self.cx);
|
let unsigned_type = native_int_type.to_unsigned(self.cx);
|
||||||
|
|
||||||
let lhs_low = self.context.new_cast(self.location, self.low(lhs), unsigned_type);
|
let lhs_low = self.context.new_cast(self.location, self.low(lhs), unsigned_type);
|
||||||
let rhs_low = self.context.new_cast(self.location, self.low(rhs), unsigned_type);
|
let rhs_low = self.context.new_cast(self.location, self.low(rhs), unsigned_type);
|
||||||
|
@ -589,7 +602,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||||
| IntPredicate::IntULT
|
| IntPredicate::IntULT
|
||||||
| IntPredicate::IntULE => {
|
| IntPredicate::IntULE => {
|
||||||
if !a_type.is_vector() {
|
if !a_type.is_vector() {
|
||||||
let unsigned_type = a_type.to_unsigned(&self.cx);
|
let unsigned_type = a_type.to_unsigned(self.cx);
|
||||||
lhs = self.context.new_cast(self.location, lhs, unsigned_type);
|
lhs = self.context.new_cast(self.location, lhs, unsigned_type);
|
||||||
rhs = self.context.new_cast(self.location, rhs, unsigned_type);
|
rhs = self.context.new_cast(self.location, rhs, unsigned_type);
|
||||||
}
|
}
|
||||||
|
@ -612,7 +625,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||||
{
|
{
|
||||||
a ^ b
|
a ^ b
|
||||||
} else {
|
} else {
|
||||||
self.from_low_high_rvalues(
|
self.concat_low_high_rvalues(
|
||||||
a_type,
|
a_type,
|
||||||
self.low(a) ^ self.low(b),
|
self.low(a) ^ self.low(b),
|
||||||
self.high(a) ^ self.high(b),
|
self.high(a) ^ self.high(b),
|
||||||
|
@ -635,7 +648,19 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||||
let b = self.context.new_cast(self.location, b, a_type);
|
let b = self.context.new_cast(self.location, b, a_type);
|
||||||
a << b
|
a << b
|
||||||
} else {
|
} else {
|
||||||
a << b
|
let a_size = a_type.get_size();
|
||||||
|
let b_size = b_type.get_size();
|
||||||
|
match a_size.cmp(&b_size) {
|
||||||
|
std::cmp::Ordering::Less => {
|
||||||
|
let a = self.context.new_cast(self.location, a, b_type);
|
||||||
|
a << b
|
||||||
|
}
|
||||||
|
std::cmp::Ordering::Equal => a << b,
|
||||||
|
std::cmp::Ordering::Greater => {
|
||||||
|
let b = self.context.new_cast(self.location, b, a_type);
|
||||||
|
a << b
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
} else if a_type.is_vector() && a_type.is_vector() {
|
} else if a_type.is_vector() && a_type.is_vector() {
|
||||||
a << b
|
a << b
|
||||||
|
@ -661,7 +686,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||||
self.llbb().end_with_conditional(self.location, condition, then_block, else_block);
|
self.llbb().end_with_conditional(self.location, condition, then_block, else_block);
|
||||||
|
|
||||||
let array_value =
|
let array_value =
|
||||||
self.from_low_high_rvalues(a_type, zero, self.low(a) << (b - sixty_four));
|
self.concat_low_high_rvalues(a_type, zero, self.low(a) << (b - sixty_four));
|
||||||
then_block.add_assignment(self.location, result, array_value);
|
then_block.add_assignment(self.location, result, array_value);
|
||||||
then_block.end_with_jump(self.location, after_block);
|
then_block.end_with_jump(self.location, after_block);
|
||||||
|
|
||||||
|
@ -673,13 +698,13 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||||
|
|
||||||
// NOTE: cast low to its unsigned type in order to perform a logical right shift.
|
// NOTE: cast low to its unsigned type in order to perform a logical right shift.
|
||||||
// TODO(antoyo): adjust this ^ comment.
|
// TODO(antoyo): adjust this ^ comment.
|
||||||
let unsigned_type = native_int_type.to_unsigned(&self.cx);
|
let unsigned_type = native_int_type.to_unsigned(self.cx);
|
||||||
let casted_low = self.context.new_cast(self.location, self.low(a), unsigned_type);
|
let casted_low = self.context.new_cast(self.location, self.low(a), unsigned_type);
|
||||||
let shift_value = self.context.new_cast(self.location, sixty_four - b, unsigned_type);
|
let shift_value = self.context.new_cast(self.location, sixty_four - b, unsigned_type);
|
||||||
let high_low =
|
let high_low =
|
||||||
self.context.new_cast(self.location, casted_low >> shift_value, native_int_type);
|
self.context.new_cast(self.location, casted_low >> shift_value, native_int_type);
|
||||||
|
|
||||||
let array_value = self.from_low_high_rvalues(
|
let array_value = self.concat_low_high_rvalues(
|
||||||
a_type,
|
a_type,
|
||||||
self.low(a) << b,
|
self.low(a) << b,
|
||||||
(self.high(a) << b) | high_low,
|
(self.high(a) << b) | high_low,
|
||||||
|
@ -708,7 +733,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||||
|
|
||||||
// NOTE: we also need to swap the two elements here, in addition to swapping inside
|
// NOTE: we also need to swap the two elements here, in addition to swapping inside
|
||||||
// the elements themselves like done above.
|
// the elements themselves like done above.
|
||||||
return self.from_low_high_rvalues(arg_type, swapped_msb, swapped_lsb);
|
return self.concat_low_high_rvalues(arg_type, swapped_msb, swapped_lsb);
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO(antoyo): check if it's faster to use string literals and a
|
// TODO(antoyo): check if it's faster to use string literals and a
|
||||||
|
@ -727,10 +752,10 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||||
impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||||
pub fn gcc_int(&self, typ: Type<'gcc>, int: i64) -> RValue<'gcc> {
|
pub fn gcc_int(&self, typ: Type<'gcc>, int: i64) -> RValue<'gcc> {
|
||||||
if self.is_native_int_type_or_bool(typ) {
|
if self.is_native_int_type_or_bool(typ) {
|
||||||
self.context.new_rvalue_from_long(typ, i64::try_from(int).expect("i64::try_from"))
|
self.context.new_rvalue_from_long(typ, int)
|
||||||
} else {
|
} else {
|
||||||
// NOTE: set the sign in high.
|
// NOTE: set the sign in high.
|
||||||
self.from_low_high(typ, int, -(int.is_negative() as i64))
|
self.concat_low_high(typ, int, -(int.is_negative() as i64))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -740,10 +765,9 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||||
let num = self.context.new_rvalue_from_long(self.u64_type, int as i64);
|
let num = self.context.new_rvalue_from_long(self.u64_type, int as i64);
|
||||||
self.gcc_int_cast(num, typ)
|
self.gcc_int_cast(num, typ)
|
||||||
} else if self.is_native_int_type_or_bool(typ) {
|
} else if self.is_native_int_type_or_bool(typ) {
|
||||||
self.context
|
self.context.new_rvalue_from_long(typ, int as i64)
|
||||||
.new_rvalue_from_long(typ, u64::try_from(int).expect("u64::try_from") as i64)
|
|
||||||
} else {
|
} else {
|
||||||
self.from_low_high(typ, int as i64, 0)
|
self.concat_low_high(typ, int as i64, 0)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -760,7 +784,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||||
let shift = high << sixty_four;
|
let shift = high << sixty_four;
|
||||||
shift | self.context.new_cast(None, low, typ)
|
shift | self.context.new_cast(None, low, typ)
|
||||||
} else {
|
} else {
|
||||||
self.from_low_high(typ, low as i64, high as i64)
|
self.concat_low_high(typ, low as i64, high as i64)
|
||||||
}
|
}
|
||||||
} else if typ.is_i128(self) {
|
} else if typ.is_i128(self) {
|
||||||
// FIXME(antoyo): libgccjit cannot create 128-bit values yet.
|
// FIXME(antoyo): libgccjit cannot create 128-bit values yet.
|
||||||
|
@ -775,7 +799,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||||
if self.is_native_int_type_or_bool(typ) {
|
if self.is_native_int_type_or_bool(typ) {
|
||||||
self.context.new_rvalue_zero(typ)
|
self.context.new_rvalue_zero(typ)
|
||||||
} else {
|
} else {
|
||||||
self.from_low_high(typ, 0, 0)
|
self.concat_low_high(typ, 0, 0)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -813,7 +837,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||||
"both types should either be native or non-native for or operation"
|
"both types should either be native or non-native for or operation"
|
||||||
);
|
);
|
||||||
let native_int_type = a_type.dyncast_array().expect("get element type");
|
let native_int_type = a_type.dyncast_array().expect("get element type");
|
||||||
self.from_low_high_rvalues(
|
self.concat_low_high_rvalues(
|
||||||
a_type,
|
a_type,
|
||||||
self.context.new_binary_op(
|
self.context.new_binary_op(
|
||||||
loc,
|
loc,
|
||||||
|
@ -858,7 +882,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||||
let is_negative =
|
let is_negative =
|
||||||
self.context.new_comparison(None, ComparisonOp::LessThan, value, zero);
|
self.context.new_comparison(None, ComparisonOp::LessThan, value, zero);
|
||||||
let is_negative = self.gcc_int_cast(is_negative, dest_element_type);
|
let is_negative = self.gcc_int_cast(is_negative, dest_element_type);
|
||||||
self.from_low_high_rvalues(
|
self.concat_low_high_rvalues(
|
||||||
dest_typ,
|
dest_typ,
|
||||||
self.context.new_cast(None, value, dest_element_type),
|
self.context.new_cast(None, value, dest_element_type),
|
||||||
self.context.new_unary_op(None, UnaryOp::Minus, dest_element_type, is_negative),
|
self.context.new_unary_op(None, UnaryOp::Minus, dest_element_type, is_negative),
|
||||||
|
@ -926,7 +950,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||||
return self.context.new_cast(None, value, dest_typ);
|
return self.context.new_cast(None, value, dest_typ);
|
||||||
}
|
}
|
||||||
|
|
||||||
debug_assert!(value_type.dyncast_array().is_some());
|
debug_assert!(dest_typ.dyncast_array().is_some());
|
||||||
let name_suffix = match self.type_kind(value_type) {
|
let name_suffix = match self.type_kind(value_type) {
|
||||||
TypeKind::Float => "sfti",
|
TypeKind::Float => "sfti",
|
||||||
TypeKind::Double => "dfti",
|
TypeKind::Double => "dfti",
|
||||||
|
@ -978,7 +1002,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||||
.to_rvalue()
|
.to_rvalue()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn from_low_high_rvalues(
|
fn concat_low_high_rvalues(
|
||||||
&self,
|
&self,
|
||||||
typ: Type<'gcc>,
|
typ: Type<'gcc>,
|
||||||
low: RValue<'gcc>,
|
low: RValue<'gcc>,
|
||||||
|
@ -993,7 +1017,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||||
self.context.new_array_constructor(None, typ, &values)
|
self.context.new_array_constructor(None, typ, &values)
|
||||||
}
|
}
|
||||||
|
|
||||||
-   fn from_low_high(&self, typ: Type<'gcc>, low: i64, high: i64) -> RValue<'gcc> {
+   fn concat_low_high(&self, typ: Type<'gcc>, low: i64, high: i64) -> RValue<'gcc> {
        let (first, last) = match self.sess().target.options.endian {
            Endian::Little => (low, high),
            Endian::Big => (high, low),
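The renamed `concat_low_high` helpers build a wide integer out of two native halves, ordering the halves according to the target's endianness. A self-contained sketch of the underlying idea, using `u128`/`u64` in place of gcc array rvalues, is:

```rust
// Sketch: a 128-bit value represented as two 64-bit halves. Arithmetically
// the high half occupies the upper 64 bits; in memory, the order of the
// halves depends on the target's endianness.
fn concat_low_high(low: u64, high: u64) -> u128 {
    ((high as u128) << 64) | low as u128
}

// Element order of the backing two-element array on a little-endian target.
fn halves_little_endian(low: u64, high: u64) -> [u64; 2] {
    [low, high] // a big-endian target would store [high, low]
}

fn main() {
    assert_eq!(concat_low_high(1, 2), (2u128 << 64) | 1);
    assert_eq!(halves_little_endian(1, 2), [1, 2]);
}
```

The `first`/`last` swap in the real code plays the role of the endian-dependent array order shown here.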
|
|
|
@ -74,6 +74,10 @@ match name {
|
||||||
"llvm.amdgcn.cvt.sr.bf8.f32" => "__builtin_amdgcn_cvt_sr_bf8_f32",
|
"llvm.amdgcn.cvt.sr.bf8.f32" => "__builtin_amdgcn_cvt_sr_bf8_f32",
|
||||||
"llvm.amdgcn.cvt.sr.fp8.f32" => "__builtin_amdgcn_cvt_sr_fp8_f32",
|
"llvm.amdgcn.cvt.sr.fp8.f32" => "__builtin_amdgcn_cvt_sr_fp8_f32",
|
||||||
"llvm.amdgcn.dispatch.id" => "__builtin_amdgcn_dispatch_id",
|
"llvm.amdgcn.dispatch.id" => "__builtin_amdgcn_dispatch_id",
|
||||||
|
"llvm.amdgcn.dot4.f32.bf8.bf8" => "__builtin_amdgcn_dot4_f32_bf8_bf8",
|
||||||
|
"llvm.amdgcn.dot4.f32.bf8.fp8" => "__builtin_amdgcn_dot4_f32_bf8_fp8",
|
||||||
|
"llvm.amdgcn.dot4.f32.fp8.bf8" => "__builtin_amdgcn_dot4_f32_fp8_bf8",
|
||||||
|
"llvm.amdgcn.dot4.f32.fp8.fp8" => "__builtin_amdgcn_dot4_f32_fp8_fp8",
|
||||||
"llvm.amdgcn.ds.add.gs.reg.rtn" => "__builtin_amdgcn_ds_add_gs_reg_rtn",
|
"llvm.amdgcn.ds.add.gs.reg.rtn" => "__builtin_amdgcn_ds_add_gs_reg_rtn",
|
||||||
"llvm.amdgcn.ds.bpermute" => "__builtin_amdgcn_ds_bpermute",
|
"llvm.amdgcn.ds.bpermute" => "__builtin_amdgcn_ds_bpermute",
|
||||||
"llvm.amdgcn.ds.fadd.v2bf16" => "__builtin_amdgcn_ds_atomic_fadd_v2bf16",
|
"llvm.amdgcn.ds.fadd.v2bf16" => "__builtin_amdgcn_ds_atomic_fadd_v2bf16",
|
||||||
|
@ -2291,6 +2295,10 @@ match name {
|
||||||
"llvm.loongarch.csrxchg.d" => "__builtin_loongarch_csrxchg_d",
|
"llvm.loongarch.csrxchg.d" => "__builtin_loongarch_csrxchg_d",
|
||||||
"llvm.loongarch.csrxchg.w" => "__builtin_loongarch_csrxchg_w",
|
"llvm.loongarch.csrxchg.w" => "__builtin_loongarch_csrxchg_w",
|
||||||
"llvm.loongarch.dbar" => "__builtin_loongarch_dbar",
|
"llvm.loongarch.dbar" => "__builtin_loongarch_dbar",
|
||||||
|
"llvm.loongarch.frecipe.d" => "__builtin_loongarch_frecipe_d",
|
||||||
|
"llvm.loongarch.frecipe.s" => "__builtin_loongarch_frecipe_s",
|
||||||
|
"llvm.loongarch.frsqrte.d" => "__builtin_loongarch_frsqrte_d",
|
||||||
|
"llvm.loongarch.frsqrte.s" => "__builtin_loongarch_frsqrte_s",
|
||||||
"llvm.loongarch.ibar" => "__builtin_loongarch_ibar",
|
"llvm.loongarch.ibar" => "__builtin_loongarch_ibar",
|
||||||
"llvm.loongarch.iocsrrd.b" => "__builtin_loongarch_iocsrrd_b",
|
"llvm.loongarch.iocsrrd.b" => "__builtin_loongarch_iocsrrd_b",
|
||||||
"llvm.loongarch.iocsrrd.d" => "__builtin_loongarch_iocsrrd_d",
|
"llvm.loongarch.iocsrrd.d" => "__builtin_loongarch_iocsrrd_d",
|
||||||
|
@ -2529,6 +2537,8 @@ match name {
|
||||||
"llvm.loongarch.lasx.xvfnmsub.s" => "__builtin_lasx_xvfnmsub_s",
|
"llvm.loongarch.lasx.xvfnmsub.s" => "__builtin_lasx_xvfnmsub_s",
|
||||||
"llvm.loongarch.lasx.xvfrecip.d" => "__builtin_lasx_xvfrecip_d",
|
"llvm.loongarch.lasx.xvfrecip.d" => "__builtin_lasx_xvfrecip_d",
|
||||||
"llvm.loongarch.lasx.xvfrecip.s" => "__builtin_lasx_xvfrecip_s",
|
"llvm.loongarch.lasx.xvfrecip.s" => "__builtin_lasx_xvfrecip_s",
|
||||||
|
"llvm.loongarch.lasx.xvfrecipe.d" => "__builtin_lasx_xvfrecipe_d",
|
||||||
|
"llvm.loongarch.lasx.xvfrecipe.s" => "__builtin_lasx_xvfrecipe_s",
|
||||||
"llvm.loongarch.lasx.xvfrint.d" => "__builtin_lasx_xvfrint_d",
|
"llvm.loongarch.lasx.xvfrint.d" => "__builtin_lasx_xvfrint_d",
|
||||||
"llvm.loongarch.lasx.xvfrint.s" => "__builtin_lasx_xvfrint_s",
|
"llvm.loongarch.lasx.xvfrint.s" => "__builtin_lasx_xvfrint_s",
|
||||||
"llvm.loongarch.lasx.xvfrintrm.d" => "__builtin_lasx_xvfrintrm_d",
|
"llvm.loongarch.lasx.xvfrintrm.d" => "__builtin_lasx_xvfrintrm_d",
|
||||||
|
@ -2541,6 +2551,8 @@ match name {
|
||||||
"llvm.loongarch.lasx.xvfrintrz.s" => "__builtin_lasx_xvfrintrz_s",
|
"llvm.loongarch.lasx.xvfrintrz.s" => "__builtin_lasx_xvfrintrz_s",
|
||||||
"llvm.loongarch.lasx.xvfrsqrt.d" => "__builtin_lasx_xvfrsqrt_d",
|
"llvm.loongarch.lasx.xvfrsqrt.d" => "__builtin_lasx_xvfrsqrt_d",
|
||||||
"llvm.loongarch.lasx.xvfrsqrt.s" => "__builtin_lasx_xvfrsqrt_s",
|
"llvm.loongarch.lasx.xvfrsqrt.s" => "__builtin_lasx_xvfrsqrt_s",
|
||||||
|
"llvm.loongarch.lasx.xvfrsqrte.d" => "__builtin_lasx_xvfrsqrte_d",
|
||||||
|
"llvm.loongarch.lasx.xvfrsqrte.s" => "__builtin_lasx_xvfrsqrte_s",
|
||||||
"llvm.loongarch.lasx.xvfrstp.b" => "__builtin_lasx_xvfrstp_b",
|
"llvm.loongarch.lasx.xvfrstp.b" => "__builtin_lasx_xvfrstp_b",
|
||||||
"llvm.loongarch.lasx.xvfrstp.h" => "__builtin_lasx_xvfrstp_h",
|
"llvm.loongarch.lasx.xvfrstp.h" => "__builtin_lasx_xvfrstp_h",
|
||||||
"llvm.loongarch.lasx.xvfrstpi.b" => "__builtin_lasx_xvfrstpi_b",
|
"llvm.loongarch.lasx.xvfrstpi.b" => "__builtin_lasx_xvfrstpi_b",
|
||||||
|
@ -3255,6 +3267,8 @@ match name {
|
||||||
"llvm.loongarch.lsx.vfnmsub.s" => "__builtin_lsx_vfnmsub_s",
|
"llvm.loongarch.lsx.vfnmsub.s" => "__builtin_lsx_vfnmsub_s",
|
||||||
"llvm.loongarch.lsx.vfrecip.d" => "__builtin_lsx_vfrecip_d",
|
"llvm.loongarch.lsx.vfrecip.d" => "__builtin_lsx_vfrecip_d",
|
||||||
"llvm.loongarch.lsx.vfrecip.s" => "__builtin_lsx_vfrecip_s",
|
"llvm.loongarch.lsx.vfrecip.s" => "__builtin_lsx_vfrecip_s",
|
||||||
|
"llvm.loongarch.lsx.vfrecipe.d" => "__builtin_lsx_vfrecipe_d",
|
||||||
|
"llvm.loongarch.lsx.vfrecipe.s" => "__builtin_lsx_vfrecipe_s",
|
||||||
"llvm.loongarch.lsx.vfrint.d" => "__builtin_lsx_vfrint_d",
|
"llvm.loongarch.lsx.vfrint.d" => "__builtin_lsx_vfrint_d",
|
||||||
"llvm.loongarch.lsx.vfrint.s" => "__builtin_lsx_vfrint_s",
|
"llvm.loongarch.lsx.vfrint.s" => "__builtin_lsx_vfrint_s",
|
||||||
"llvm.loongarch.lsx.vfrintrm.d" => "__builtin_lsx_vfrintrm_d",
|
"llvm.loongarch.lsx.vfrintrm.d" => "__builtin_lsx_vfrintrm_d",
|
||||||
|
@ -3267,6 +3281,8 @@ match name {
|
||||||
"llvm.loongarch.lsx.vfrintrz.s" => "__builtin_lsx_vfrintrz_s",
|
"llvm.loongarch.lsx.vfrintrz.s" => "__builtin_lsx_vfrintrz_s",
|
||||||
"llvm.loongarch.lsx.vfrsqrt.d" => "__builtin_lsx_vfrsqrt_d",
|
"llvm.loongarch.lsx.vfrsqrt.d" => "__builtin_lsx_vfrsqrt_d",
|
||||||
"llvm.loongarch.lsx.vfrsqrt.s" => "__builtin_lsx_vfrsqrt_s",
|
"llvm.loongarch.lsx.vfrsqrt.s" => "__builtin_lsx_vfrsqrt_s",
|
||||||
|
"llvm.loongarch.lsx.vfrsqrte.d" => "__builtin_lsx_vfrsqrte_d",
|
||||||
|
"llvm.loongarch.lsx.vfrsqrte.s" => "__builtin_lsx_vfrsqrte_s",
|
||||||
"llvm.loongarch.lsx.vfrstp.b" => "__builtin_lsx_vfrstp_b",
|
"llvm.loongarch.lsx.vfrstp.b" => "__builtin_lsx_vfrstp_b",
|
||||||
"llvm.loongarch.lsx.vfrstp.h" => "__builtin_lsx_vfrstp_h",
|
"llvm.loongarch.lsx.vfrstp.h" => "__builtin_lsx_vfrstp_h",
|
||||||
"llvm.loongarch.lsx.vfrstpi.b" => "__builtin_lsx_vfrstpi_b",
|
"llvm.loongarch.lsx.vfrstpi.b" => "__builtin_lsx_vfrstpi_b",
|
||||||
|
@ -4434,6 +4450,7 @@ match name {
|
||||||
"llvm.nvvm.abs.bf16x2" => "__nvvm_abs_bf16x2",
|
"llvm.nvvm.abs.bf16x2" => "__nvvm_abs_bf16x2",
|
||||||
"llvm.nvvm.abs.i" => "__nvvm_abs_i",
|
"llvm.nvvm.abs.i" => "__nvvm_abs_i",
|
||||||
"llvm.nvvm.abs.ll" => "__nvvm_abs_ll",
|
"llvm.nvvm.abs.ll" => "__nvvm_abs_ll",
|
||||||
|
"llvm.nvvm.activemask" => "__nvvm_activemask",
|
||||||
"llvm.nvvm.add.rm.d" => "__nvvm_add_rm_d",
|
"llvm.nvvm.add.rm.d" => "__nvvm_add_rm_d",
|
||||||
"llvm.nvvm.add.rm.f" => "__nvvm_add_rm_f",
|
"llvm.nvvm.add.rm.f" => "__nvvm_add_rm_f",
|
||||||
"llvm.nvvm.add.rm.ftz.f" => "__nvvm_add_rm_ftz_f",
|
"llvm.nvvm.add.rm.ftz.f" => "__nvvm_add_rm_ftz_f",
|
||||||
|
@ -4522,6 +4539,7 @@ match name {
|
||||||
"llvm.nvvm.ex2.approx.d" => "__nvvm_ex2_approx_d",
|
"llvm.nvvm.ex2.approx.d" => "__nvvm_ex2_approx_d",
|
||||||
"llvm.nvvm.ex2.approx.f" => "__nvvm_ex2_approx_f",
|
"llvm.nvvm.ex2.approx.f" => "__nvvm_ex2_approx_f",
|
||||||
"llvm.nvvm.ex2.approx.ftz.f" => "__nvvm_ex2_approx_ftz_f",
|
"llvm.nvvm.ex2.approx.ftz.f" => "__nvvm_ex2_approx_ftz_f",
|
||||||
|
"llvm.nvvm.exit" => "__nvvm_exit",
|
||||||
"llvm.nvvm.f2bf16.rn" => "__nvvm_f2bf16_rn",
|
"llvm.nvvm.f2bf16.rn" => "__nvvm_f2bf16_rn",
|
||||||
"llvm.nvvm.f2bf16.rn.relu" => "__nvvm_f2bf16_rn_relu",
|
"llvm.nvvm.f2bf16.rn.relu" => "__nvvm_f2bf16_rn_relu",
|
||||||
"llvm.nvvm.f2bf16.rz" => "__nvvm_f2bf16_rz",
|
"llvm.nvvm.f2bf16.rz" => "__nvvm_f2bf16_rz",
|
||||||
|
@ -4722,8 +4740,11 @@ match name {
|
||||||
"llvm.nvvm.mul24.ui" => "__nvvm_mul24_ui",
|
"llvm.nvvm.mul24.ui" => "__nvvm_mul24_ui",
|
||||||
"llvm.nvvm.mulhi.i" => "__nvvm_mulhi_i",
|
"llvm.nvvm.mulhi.i" => "__nvvm_mulhi_i",
|
||||||
"llvm.nvvm.mulhi.ll" => "__nvvm_mulhi_ll",
|
"llvm.nvvm.mulhi.ll" => "__nvvm_mulhi_ll",
|
||||||
|
"llvm.nvvm.mulhi.s" => "__nvvm_mulhi_s",
|
||||||
"llvm.nvvm.mulhi.ui" => "__nvvm_mulhi_ui",
|
"llvm.nvvm.mulhi.ui" => "__nvvm_mulhi_ui",
|
||||||
"llvm.nvvm.mulhi.ull" => "__nvvm_mulhi_ull",
|
"llvm.nvvm.mulhi.ull" => "__nvvm_mulhi_ull",
|
||||||
|
"llvm.nvvm.mulhi.us" => "__nvvm_mulhi_us",
|
||||||
|
"llvm.nvvm.nanosleep" => "__nvvm_nanosleep",
|
||||||
"llvm.nvvm.neg.bf16" => "__nvvm_neg_bf16",
|
"llvm.nvvm.neg.bf16" => "__nvvm_neg_bf16",
|
||||||
"llvm.nvvm.neg.bf16x2" => "__nvvm_neg_bf16x2",
|
"llvm.nvvm.neg.bf16x2" => "__nvvm_neg_bf16x2",
|
||||||
"llvm.nvvm.popc.i" => "__nvvm_popc_i",
|
"llvm.nvvm.popc.i" => "__nvvm_popc_i",
|
||||||
|
@ -4783,6 +4804,7 @@ match name {
|
||||||
"llvm.nvvm.read.ptx.sreg.envreg7" => "__nvvm_read_ptx_sreg_envreg7",
|
"llvm.nvvm.read.ptx.sreg.envreg7" => "__nvvm_read_ptx_sreg_envreg7",
|
||||||
"llvm.nvvm.read.ptx.sreg.envreg8" => "__nvvm_read_ptx_sreg_envreg8",
|
"llvm.nvvm.read.ptx.sreg.envreg8" => "__nvvm_read_ptx_sreg_envreg8",
|
||||||
"llvm.nvvm.read.ptx.sreg.envreg9" => "__nvvm_read_ptx_sreg_envreg9",
|
"llvm.nvvm.read.ptx.sreg.envreg9" => "__nvvm_read_ptx_sreg_envreg9",
|
||||||
|
"llvm.nvvm.read.ptx.sreg.globaltimer" => "__nvvm_read_ptx_sreg_globaltimer",
|
||||||
"llvm.nvvm.read.ptx.sreg.gridid" => "__nvvm_read_ptx_sreg_gridid",
|
"llvm.nvvm.read.ptx.sreg.gridid" => "__nvvm_read_ptx_sreg_gridid",
|
||||||
// [DUPLICATE]: "llvm.nvvm.read.ptx.sreg.gridid" => "__nvvm_read_ptx_sreg_",
|
// [DUPLICATE]: "llvm.nvvm.read.ptx.sreg.gridid" => "__nvvm_read_ptx_sreg_",
|
||||||
"llvm.nvvm.read.ptx.sreg.laneid" => "__nvvm_read_ptx_sreg_laneid",
|
"llvm.nvvm.read.ptx.sreg.laneid" => "__nvvm_read_ptx_sreg_laneid",
|
||||||
|
@ -4835,6 +4857,7 @@ match name {
|
||||||
"llvm.nvvm.redux.sync.umax" => "__nvvm_redux_sync_umax",
|
"llvm.nvvm.redux.sync.umax" => "__nvvm_redux_sync_umax",
|
||||||
"llvm.nvvm.redux.sync.umin" => "__nvvm_redux_sync_umin",
|
"llvm.nvvm.redux.sync.umin" => "__nvvm_redux_sync_umin",
|
||||||
"llvm.nvvm.redux.sync.xor" => "__nvvm_redux_sync_xor",
|
"llvm.nvvm.redux.sync.xor" => "__nvvm_redux_sync_xor",
|
||||||
|
"llvm.nvvm.reflect" => "__nvvm_reflect",
|
||||||
"llvm.nvvm.rotate.b32" => "__nvvm_rotate_b32",
|
"llvm.nvvm.rotate.b32" => "__nvvm_rotate_b32",
|
||||||
"llvm.nvvm.rotate.b64" => "__nvvm_rotate_b64",
|
"llvm.nvvm.rotate.b64" => "__nvvm_rotate_b64",
|
||||||
"llvm.nvvm.rotate.right.b64" => "__nvvm_rotate_right_b64",
|
"llvm.nvvm.rotate.right.b64" => "__nvvm_rotate_right_b64",
|
||||||
|
@ -4845,7 +4868,11 @@ match name {
|
||||||
"llvm.nvvm.rsqrt.approx.f" => "__nvvm_rsqrt_approx_f",
|
"llvm.nvvm.rsqrt.approx.f" => "__nvvm_rsqrt_approx_f",
|
||||||
"llvm.nvvm.rsqrt.approx.ftz.f" => "__nvvm_rsqrt_approx_ftz_f",
|
"llvm.nvvm.rsqrt.approx.ftz.f" => "__nvvm_rsqrt_approx_ftz_f",
|
||||||
"llvm.nvvm.sad.i" => "__nvvm_sad_i",
|
"llvm.nvvm.sad.i" => "__nvvm_sad_i",
|
||||||
|
"llvm.nvvm.sad.ll" => "__nvvm_sad_ll",
|
||||||
|
"llvm.nvvm.sad.s" => "__nvvm_sad_s",
|
||||||
"llvm.nvvm.sad.ui" => "__nvvm_sad_ui",
|
"llvm.nvvm.sad.ui" => "__nvvm_sad_ui",
|
||||||
|
"llvm.nvvm.sad.ull" => "__nvvm_sad_ull",
|
||||||
|
"llvm.nvvm.sad.us" => "__nvvm_sad_us",
|
||||||
"llvm.nvvm.saturate.d" => "__nvvm_saturate_d",
|
"llvm.nvvm.saturate.d" => "__nvvm_saturate_d",
|
||||||
"llvm.nvvm.saturate.f" => "__nvvm_saturate_f",
|
"llvm.nvvm.saturate.f" => "__nvvm_saturate_f",
|
||||||
"llvm.nvvm.saturate.ftz.f" => "__nvvm_saturate_ftz_f",
|
"llvm.nvvm.saturate.ftz.f" => "__nvvm_saturate_ftz_f",
|
||||||
|
@ -5471,6 +5498,7 @@ match name {
|
||||||
"llvm.ppc.fctiwz" => "__builtin_ppc_fctiwz",
|
"llvm.ppc.fctiwz" => "__builtin_ppc_fctiwz",
|
||||||
"llvm.ppc.fctudz" => "__builtin_ppc_fctudz",
|
"llvm.ppc.fctudz" => "__builtin_ppc_fctudz",
|
||||||
"llvm.ppc.fctuwz" => "__builtin_ppc_fctuwz",
|
"llvm.ppc.fctuwz" => "__builtin_ppc_fctuwz",
|
||||||
|
"llvm.ppc.fence" => "__builtin_ppc_fence",
|
||||||
"llvm.ppc.fmaf128.round.to.odd" => "__builtin_fmaf128_round_to_odd",
|
"llvm.ppc.fmaf128.round.to.odd" => "__builtin_fmaf128_round_to_odd",
|
||||||
"llvm.ppc.fmsub" => "__builtin_ppc_fmsub",
|
"llvm.ppc.fmsub" => "__builtin_ppc_fmsub",
|
||||||
"llvm.ppc.fmsubs" => "__builtin_ppc_fmsubs",
|
"llvm.ppc.fmsubs" => "__builtin_ppc_fmsubs",
|
||||||
|
@ -5599,6 +5627,9 @@ match name {
|
||||||
"llvm.ppc.qpx.qvstfs" => "__builtin_qpx_qvstfs",
|
"llvm.ppc.qpx.qvstfs" => "__builtin_qpx_qvstfs",
|
||||||
"llvm.ppc.qpx.qvstfsa" => "__builtin_qpx_qvstfsa",
|
"llvm.ppc.qpx.qvstfsa" => "__builtin_qpx_qvstfsa",
|
||||||
"llvm.ppc.readflm" => "__builtin_readflm",
|
"llvm.ppc.readflm" => "__builtin_readflm",
|
||||||
|
"llvm.ppc.rldimi" => "__builtin_ppc_rldimi",
|
||||||
|
"llvm.ppc.rlwimi" => "__builtin_ppc_rlwimi",
|
||||||
|
"llvm.ppc.rlwnm" => "__builtin_ppc_rlwnm",
|
||||||
"llvm.ppc.scalar.extract.expq" => "__builtin_vsx_scalar_extract_expq",
|
"llvm.ppc.scalar.extract.expq" => "__builtin_vsx_scalar_extract_expq",
|
||||||
"llvm.ppc.scalar.insert.exp.qp" => "__builtin_vsx_scalar_insert_exp_qp",
|
"llvm.ppc.scalar.insert.exp.qp" => "__builtin_vsx_scalar_insert_exp_qp",
|
||||||
"llvm.ppc.set.texasr" => "__builtin_set_texasr",
|
"llvm.ppc.set.texasr" => "__builtin_set_texasr",
|
||||||
|
@ -5912,6 +5943,8 @@ match name {
|
||||||
"llvm.s390.vupllb" => "__builtin_s390_vupllb",
|
"llvm.s390.vupllb" => "__builtin_s390_vupllb",
|
||||||
"llvm.s390.vupllf" => "__builtin_s390_vupllf",
|
"llvm.s390.vupllf" => "__builtin_s390_vupllf",
|
||||||
"llvm.s390.vupllh" => "__builtin_s390_vupllh",
|
"llvm.s390.vupllh" => "__builtin_s390_vupllh",
|
||||||
|
// spv
|
||||||
|
"llvm.spv.create.handle" => "__builtin_hlsl_create_handle",
|
||||||
// ve
|
// ve
|
||||||
"llvm.ve.vl.andm.MMM" => "__builtin_ve_vl_andm_MMM",
|
"llvm.ve.vl.andm.MMM" => "__builtin_ve_vl_andm_MMM",
|
||||||
"llvm.ve.vl.andm.mmm" => "__builtin_ve_vl_andm_mmm",
|
"llvm.ve.vl.andm.mmm" => "__builtin_ve_vl_andm_mmm",
|
||||||
|
|
|
@ -15,7 +15,7 @@ pub fn adjust_intrinsic_arguments<'a, 'b, 'gcc, 'tcx>(
|
||||||
// Some LLVM intrinsics do not map 1-to-1 to GCC intrinsics, so we add the missing
|
// Some LLVM intrinsics do not map 1-to-1 to GCC intrinsics, so we add the missing
|
||||||
// arguments here.
|
// arguments here.
|
||||||
if gcc_func.get_param_count() != args.len() {
|
if gcc_func.get_param_count() != args.len() {
|
||||||
match &*func_name {
|
match func_name {
|
||||||
// NOTE: the following intrinsics have a different number of parameters in LLVM and GCC.
|
// NOTE: the following intrinsics have a different number of parameters in LLVM and GCC.
|
||||||
"__builtin_ia32_prold512_mask"
|
"__builtin_ia32_prold512_mask"
|
||||||
| "__builtin_ia32_pmuldq512_mask"
|
| "__builtin_ia32_pmuldq512_mask"
|
||||||
|
@ -380,7 +380,7 @@ pub fn adjust_intrinsic_arguments<'a, 'b, 'gcc, 'tcx>(
|
||||||
_ => (),
|
_ => (),
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
match &*func_name {
|
match func_name {
|
||||||
"__builtin_ia32_rndscaless_mask_round" | "__builtin_ia32_rndscalesd_mask_round" => {
|
"__builtin_ia32_rndscaless_mask_round" | "__builtin_ia32_rndscalesd_mask_round" => {
|
||||||
let new_args = args.to_vec();
|
let new_args = args.to_vec();
|
||||||
let arg3_type = gcc_func.get_param_type(2);
|
let arg3_type = gcc_func.get_param_type(2);
|
@ -629,17 +629,22 @@ pub fn intrinsic<'gcc, 'tcx>(name: &str, cx: &CodegenCx<'gcc, 'tcx>) -> Function

#[cfg(feature = "master")]
pub fn intrinsic<'gcc, 'tcx>(name: &str, cx: &CodegenCx<'gcc, 'tcx>) -> Function<'gcc> {
-   match name {
+   let gcc_name = match name {
        "llvm.prefetch" => {
            let gcc_name = "__builtin_prefetch";
            let func = cx.context.get_builtin_function(gcc_name);
            cx.functions.borrow_mut().insert(gcc_name.to_string(), func);
            return func;
        }
-       _ => (),
-   }
-
-   let gcc_name = match name {
+
+       "llvm.aarch64.isb" => {
+           // FIXME: GCC doesn't support __builtin_arm_isb yet, check if this builtin is OK.
+           let gcc_name = "__atomic_thread_fence";
+           let func = cx.context.get_builtin_function(gcc_name);
+           cx.functions.borrow_mut().insert(gcc_name.to_string(), func);
+           return func;
+       }
+
        "llvm.x86.xgetbv" => "__builtin_ia32_xgetbv",
        // NOTE: this doc specifies the equivalent GCC builtins: http://huonw.github.io/llvmint/llvmint/x86/index.html
        "llvm.sqrt.v2f64" => "__builtin_ia32_sqrtpd",
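The rework above folds the two `match` statements into one: most arms simply yield a builtin name, while a few special cases register the builtin in the function cache and return early. A rough stand-alone sketch of that control flow (a plain `HashMap` as the cache; not the backend API) is:

```rust
use std::collections::HashMap;

// Sketch: one match maps intrinsic names to builtin names, while special
// cases insert into a cache and return early.
fn intrinsic(name: &str, cache: &mut HashMap<String, String>) -> String {
    let builtin_name = match name {
        "llvm.prefetch" => {
            let gcc_name = "__builtin_prefetch".to_string();
            cache.insert(gcc_name.clone(), gcc_name.clone());
            return gcc_name; // early return for the special case
        }
        "llvm.x86.xgetbv" => "__builtin_ia32_xgetbv",
        "llvm.sqrt.v2f64" => "__builtin_ia32_sqrtpd",
        other => panic!("unsupported intrinsic {other}"),
    };
    builtin_name.to_string()
}

fn main() {
    let mut cache = HashMap::new();
    assert_eq!(intrinsic("llvm.prefetch", &mut cache), "__builtin_prefetch");
    assert_eq!(intrinsic("llvm.x86.xgetbv", &mut cache), "__builtin_ia32_xgetbv");
    assert!(cache.contains_key("__builtin_prefetch"));
}
```

Keeping everything in a single `match` lets the ordinary arms fall through to the shared lookup at the end, while the special cases still short-circuit.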
|
|
|
@ -91,7 +91,7 @@ fn get_simple_intrinsic<'gcc, 'tcx>(
|
||||||
sym::abort => "abort",
|
sym::abort => "abort",
|
||||||
_ => return None,
|
_ => return None,
|
||||||
};
|
};
|
||||||
Some(cx.context.get_builtin_function(&gcc_name))
|
Some(cx.context.get_builtin_function(gcc_name))
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||||
|
@ -122,10 +122,17 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||||
let result = PlaceRef::new_sized(llresult, fn_abi.ret.layout);
|
let result = PlaceRef::new_sized(llresult, fn_abi.ret.layout);
|
||||||
|
|
||||||
let simple = get_simple_intrinsic(self, name);
|
let simple = get_simple_intrinsic(self, name);
|
||||||
|
|
||||||
|
// FIXME(tempdragon): Re-enable `clippy::suspicious_else_formatting` if the following issue is solved:
|
||||||
|
// https://github.com/rust-lang/rust-clippy/issues/12497
|
||||||
|
// and leave `else if use_integer_compare` to be placed "as is".
|
||||||
|
#[allow(clippy::suspicious_else_formatting)]
|
||||||
let llval = match name {
|
let llval = match name {
|
||||||
_ if simple.is_some() => {
|
_ if simple.is_some() => {
|
||||||
// FIXME(antoyo): remove this cast when the API supports function.
|
// FIXME(antoyo): remove this cast when the API supports function.
|
||||||
let func = unsafe { std::mem::transmute(simple.expect("simple")) };
|
let func = unsafe {
|
||||||
|
std::mem::transmute::<Function<'gcc>, RValue<'gcc>>(simple.expect("simple"))
|
||||||
|
};
|
||||||
self.call(
|
self.call(
|
||||||
self.type_void(),
|
self.type_void(),
|
||||||
None,
|
None,
|
||||||
|
@ -167,7 +174,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||||
sym::volatile_load | sym::unaligned_volatile_load => {
|
sym::volatile_load | sym::unaligned_volatile_load => {
|
||||||
let tp_ty = fn_args.type_at(0);
|
let tp_ty = fn_args.type_at(0);
|
||||||
let ptr = args[0].immediate();
|
let ptr = args[0].immediate();
|
||||||
let load = if let PassMode::Cast { cast: ty, pad_i32: _ } = &fn_abi.ret.mode {
|
let load = if let PassMode::Cast { cast: ref ty, pad_i32: _ } = fn_abi.ret.mode {
|
||||||
let gcc_ty = ty.gcc_type(self);
|
let gcc_ty = ty.gcc_type(self);
|
||||||
self.volatile_load(gcc_ty, ptr)
|
self.volatile_load(gcc_ty, ptr)
|
||||||
} else {
|
} else {
|
||||||
|
@ -213,12 +220,12 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||||
let after_block = func.new_block("after");
|
let after_block = func.new_block("after");
|
||||||
|
|
||||||
let arg = args[0].immediate();
|
let arg = args[0].immediate();
|
||||||
let result = func.new_local(None, arg.get_type(), "zeros");
|
let result = func.new_local(None, self.u32_type, "zeros");
|
||||||
let zero = self.cx.gcc_zero(arg.get_type());
|
let zero = self.cx.gcc_zero(arg.get_type());
|
||||||
let cond = self.gcc_icmp(IntPredicate::IntEQ, arg, zero);
|
let cond = self.gcc_icmp(IntPredicate::IntEQ, arg, zero);
|
||||||
self.llbb().end_with_conditional(None, cond, then_block, else_block);
|
self.llbb().end_with_conditional(None, cond, then_block, else_block);
|
||||||
|
|
||||||
let zero_result = self.cx.gcc_uint(arg.get_type(), width);
|
let zero_result = self.cx.gcc_uint(self.u32_type, width);
|
||||||
then_block.add_assignment(None, result, zero_result);
|
then_block.add_assignment(None, result, zero_result);
|
||||||
then_block.end_with_jump(None, after_block);
|
then_block.end_with_jump(None, after_block);
|
||||||
|
|
||||||
|
@ -386,7 +393,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||||
};
|
};
|
||||||
|
|
||||||
if !fn_abi.ret.is_ignore() {
|
if !fn_abi.ret.is_ignore() {
|
||||||
if let PassMode::Cast { cast: ty, .. } = &fn_abi.ret.mode {
|
if let PassMode::Cast { cast: ref ty, .. } = fn_abi.ret.mode {
|
||||||
let ptr_llty = self.type_ptr_to(ty.gcc_type(self));
|
let ptr_llty = self.type_ptr_to(ty.gcc_type(self));
|
||||||
let ptr = self.pointercast(result.val.llval, ptr_llty);
|
let ptr = self.pointercast(result.val.llval, ptr_llty);
|
||||||
self.store(llval, ptr, result.val.align);
|
self.store(llval, ptr, result.val.align);
|
||||||
|
@ -592,7 +599,7 @@ fn int_type_width_signed<'gcc, 'tcx>(
|
||||||
ty: Ty<'tcx>,
|
ty: Ty<'tcx>,
|
||||||
cx: &CodegenCx<'gcc, 'tcx>,
|
cx: &CodegenCx<'gcc, 'tcx>,
|
||||||
) -> Option<(u64, bool)> {
|
) -> Option<(u64, bool)> {
|
||||||
match ty.kind() {
|
match *ty.kind() {
|
||||||
ty::Int(t) => Some((
|
ty::Int(t) => Some((
|
||||||
match t {
|
match t {
|
||||||
rustc_middle::ty::IntTy::Isize => u64::from(cx.tcx.sess.target.pointer_width),
|
rustc_middle::ty::IntTy::Isize => u64::from(cx.tcx.sess.target.pointer_width),
|
||||||
|
@ -698,16 +705,17 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||||
fn count_leading_zeroes(&mut self, width: u64, arg: RValue<'gcc>) -> RValue<'gcc> {
|
fn count_leading_zeroes(&mut self, width: u64, arg: RValue<'gcc>) -> RValue<'gcc> {
|
||||||
// TODO(antoyo): use width?
|
// TODO(antoyo): use width?
|
||||||
let arg_type = arg.get_type();
|
let arg_type = arg.get_type();
|
||||||
|
let result_type = self.u32_type;
|
||||||
let count_leading_zeroes =
|
let count_leading_zeroes =
|
||||||
// TODO(antoyo): write a new function Type::is_compatible_with(&Type) and use it here
|
// TODO(antoyo): write a new function Type::is_compatible_with(&Type) and use it here
|
||||||
// instead of using is_uint().
|
// instead of using is_uint().
|
||||||
if arg_type.is_uint(&self.cx) {
|
if arg_type.is_uint(self.cx) {
|
||||||
"__builtin_clz"
|
"__builtin_clz"
|
||||||
}
|
}
|
||||||
else if arg_type.is_ulong(&self.cx) {
|
else if arg_type.is_ulong(self.cx) {
|
||||||
"__builtin_clzl"
|
"__builtin_clzl"
|
||||||
}
|
}
|
||||||
else if arg_type.is_ulonglong(&self.cx) {
|
else if arg_type.is_ulonglong(self.cx) {
|
||||||
"__builtin_clzll"
|
"__builtin_clzll"
|
||||||
}
|
}
|
||||||
else if width == 128 {
|
else if width == 128 {
|
||||||
|
@ -755,7 +763,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||||
|
|
||||||
let res = self.context.new_array_access(self.location, result, index);
|
let res = self.context.new_array_access(self.location, result, index);
|
||||||
|
|
||||||
return self.gcc_int_cast(res.to_rvalue(), arg_type);
|
return self.gcc_int_cast(res.to_rvalue(), result_type);
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
let count_leading_zeroes = self.context.get_builtin_function("__builtin_clzll");
|
let count_leading_zeroes = self.context.get_builtin_function("__builtin_clzll");
|
||||||
|
@ -763,17 +771,18 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||||
let diff = self.ulonglong_type.get_size() as i64 - arg_type.get_size() as i64;
|
let diff = self.ulonglong_type.get_size() as i64 - arg_type.get_size() as i64;
|
||||||
let diff = self.context.new_rvalue_from_long(self.int_type, diff * 8);
|
let diff = self.context.new_rvalue_from_long(self.int_type, diff * 8);
|
||||||
let res = self.context.new_call(self.location, count_leading_zeroes, &[arg]) - diff;
|
let res = self.context.new_call(self.location, count_leading_zeroes, &[arg]) - diff;
|
||||||
return self.context.new_cast(self.location, res, arg_type);
|
return self.context.new_cast(self.location, res, result_type);
|
||||||
};
|
};
|
||||||
let count_leading_zeroes = self.context.get_builtin_function(count_leading_zeroes);
|
let count_leading_zeroes = self.context.get_builtin_function(count_leading_zeroes);
|
||||||
let res = self.context.new_call(self.location, count_leading_zeroes, &[arg]);
|
let res = self.context.new_call(self.location, count_leading_zeroes, &[arg]);
|
||||||
self.context.new_cast(self.location, res, arg_type)
|
self.context.new_cast(self.location, res, result_type)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn count_trailing_zeroes(&mut self, _width: u64, arg: RValue<'gcc>) -> RValue<'gcc> {
|
fn count_trailing_zeroes(&mut self, _width: u64, arg: RValue<'gcc>) -> RValue<'gcc> {
|
||||||
let result_type = arg.get_type();
|
let arg_type = arg.get_type();
|
||||||
let arg = if result_type.is_signed(self.cx) {
|
let result_type = self.u32_type;
|
||||||
let new_type = result_type.to_unsigned(self.cx);
|
let arg = if arg_type.is_signed(self.cx) {
|
||||||
|
let new_type = arg_type.to_unsigned(self.cx);
|
||||||
self.gcc_int_cast(arg, new_type)
|
self.gcc_int_cast(arg, new_type)
|
||||||
} else {
|
} else {
|
||||||
arg
|
arg
|
||||||
|
@ -782,17 +791,17 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||||
let (count_trailing_zeroes, expected_type) =
|
let (count_trailing_zeroes, expected_type) =
|
||||||
// TODO(antoyo): write a new function Type::is_compatible_with(&Type) and use it here
|
// TODO(antoyo): write a new function Type::is_compatible_with(&Type) and use it here
|
||||||
// instead of using is_uint().
|
// instead of using is_uint().
|
||||||
if arg_type.is_uchar(&self.cx) || arg_type.is_ushort(&self.cx) || arg_type.is_uint(&self.cx) {
|
if arg_type.is_uchar(self.cx) || arg_type.is_ushort(self.cx) || arg_type.is_uint(self.cx) {
|
||||||
// NOTE: we don't need to & 0xFF for uchar because the result is undefined on zero.
|
// NOTE: we don't need to & 0xFF for uchar because the result is undefined on zero.
|
||||||
("__builtin_ctz", self.cx.uint_type)
|
("__builtin_ctz", self.cx.uint_type)
|
||||||
}
|
}
|
||||||
else if arg_type.is_ulong(&self.cx) {
|
else if arg_type.is_ulong(self.cx) {
|
||||||
("__builtin_ctzl", self.cx.ulong_type)
|
("__builtin_ctzl", self.cx.ulong_type)
|
||||||
}
|
}
|
||||||
else if arg_type.is_ulonglong(&self.cx) {
|
else if arg_type.is_ulonglong(self.cx) {
|
||||||
("__builtin_ctzll", self.cx.ulonglong_type)
|
("__builtin_ctzll", self.cx.ulonglong_type)
|
||||||
}
|
}
|
||||||
else if arg_type.is_u128(&self.cx) {
|
else if arg_type.is_u128(self.cx) {
|
||||||
// Adapted from the algorithm to count leading zeroes from: https://stackoverflow.com/a/28433850/389119
|
// Adapted from the algorithm to count leading zeroes from: https://stackoverflow.com/a/28433850/389119
|
||||||
let array_type = self.context.new_array_type(None, arg_type, 3);
|
let array_type = self.context.new_array_type(None, arg_type, 3);
|
||||||
let result = self.current_func()
|
let result = self.current_func()
|
||||||
|
@ -863,18 +872,16 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||||
|
|
||||||
fn pop_count(&mut self, value: RValue<'gcc>) -> RValue<'gcc> {
|
fn pop_count(&mut self, value: RValue<'gcc>) -> RValue<'gcc> {
|
||||||
// TODO(antoyo): use the optimized version with fewer operations.
|
// TODO(antoyo): use the optimized version with fewer operations.
|
||||||
let result_type = value.get_type();
|
let result_type = self.u32_type;
|
||||||
let value_type = result_type.to_unsigned(self.cx);
|
let arg_type = value.get_type();
|
||||||
|
let value_type = arg_type.to_unsigned(self.cx);
|
||||||
|
|
||||||
let value = if result_type.is_signed(self.cx) {
|
let value =
|
||||||
self.gcc_int_cast(value, value_type)
|
if arg_type.is_signed(self.cx) { self.gcc_int_cast(value, value_type) } else { value };
|
||||||
} else {
|
|
||||||
value
|
|
||||||
};
|
|
||||||
|
|
||||||
// only break apart 128-bit ints if they're not natively supported
|
// only break apart 128-bit ints if they're not natively supported
|
||||||
// TODO(antoyo): remove this if/when native 128-bit integers land in libgccjit
|
// TODO(antoyo): remove this if/when native 128-bit integers land in libgccjit
|
||||||
if value_type.is_u128(&self.cx) && !self.cx.supports_128bit_integers {
|
if value_type.is_u128(self.cx) && !self.cx.supports_128bit_integers {
|
||||||
let sixty_four = self.gcc_int(value_type, 64);
|
let sixty_four = self.gcc_int(value_type, 64);
|
||||||
let right_shift = self.gcc_lshr(value, sixty_four);
|
let right_shift = self.gcc_lshr(value, sixty_four);
|
||||||
let high = self.gcc_int_cast(right_shift, self.cx.ulonglong_type);
|
let high = self.gcc_int_cast(right_shift, self.cx.ulonglong_type);
|
||||||
|
@ -997,7 +1004,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||||
|
|
||||||
// Return `result_type`'s maximum or minimum value on overflow
|
// Return `result_type`'s maximum or minimum value on overflow
|
||||||
// NOTE: convert the type to unsigned to have an unsigned shift.
|
// NOTE: convert the type to unsigned to have an unsigned shift.
|
||||||
let unsigned_type = result_type.to_unsigned(&self.cx);
|
let unsigned_type = result_type.to_unsigned(self.cx);
|
||||||
let shifted = self.gcc_lshr(
|
let shifted = self.gcc_lshr(
|
||||||
self.gcc_int_cast(lhs, unsigned_type),
|
self.gcc_int_cast(lhs, unsigned_type),
|
||||||
self.gcc_int(unsigned_type, width as i64 - 1),
|
self.gcc_int(unsigned_type, width as i64 - 1),
|
||||||
|
@ -1189,7 +1196,7 @@ fn codegen_gnu_try<'gcc>(
|
||||||
bx.invoke(try_func_ty, None, None, try_func, &[data], then, catch, None, None);
|
bx.invoke(try_func_ty, None, None, try_func, &[data], then, catch, None, None);
|
||||||
});
|
});
|
||||||
|
|
||||||
let func = unsafe { std::mem::transmute(func) };
|
let func = unsafe { std::mem::transmute::<Function<'gcc>, RValue<'gcc>>(func) };
|
||||||
|
|
||||||
// Note that no invoke is used here because by definition this function
|
// Note that no invoke is used here because by definition this function
|
||||||
// can't panic (that's what it's catching).
|
// can't panic (that's what it's catching).
|
||||||
|
@ -1263,7 +1270,7 @@ fn gen_fn<'a, 'gcc, 'tcx>(
|
||||||
// FIXME(eddyb) find a nicer way to do this.
|
// FIXME(eddyb) find a nicer way to do this.
|
||||||
cx.linkage.set(FunctionType::Internal);
|
cx.linkage.set(FunctionType::Internal);
|
||||||
let func = cx.declare_fn(name, fn_abi);
|
let func = cx.declare_fn(name, fn_abi);
|
||||||
let func_val = unsafe { std::mem::transmute(func) };
|
let func_val = unsafe { std::mem::transmute::<Function<'gcc>, RValue<'gcc>>(func) };
|
||||||
cx.set_frame_pointer_type(func_val);
|
cx.set_frame_pointer_type(func_val);
|
||||||
cx.apply_target_cpu_attr(func_val);
|
cx.apply_target_cpu_attr(func_val);
|
||||||
let block = Builder::append_block(cx, func_val, "entry-block");
|
let block = Builder::append_block(cx, func_val, "entry-block");
|
||||||
|
|
|
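// Illustrative sketch (not part of the patch): Rust's bit-counting intrinsics always
// return a u32 regardless of the operand width, which is why count_leading_zeroes,
// count_trailing_zeroes and pop_count above now produce a 32-bit result type instead
// of reusing the argument type.
fn main() {
    let a: u8 = 0b0001_0000;
    let b: u64 = 1;
    let x: u32 = a.leading_zeros(); // 3
    let y: u32 = b.trailing_zeros(); // 0
    let z: u32 = u128::MAX.count_ones(); // 128
    assert_eq!((x, y, z), (3, 0, 128));
}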
@@ -13,6 +13,7 @@ use rustc_codegen_ssa::errors::InvalidMonomorphization;
use rustc_codegen_ssa::mir::operand::OperandRef;
use rustc_codegen_ssa::mir::place::PlaceRef;
use rustc_codegen_ssa::traits::{BaseTypeMethods, BuilderMethods};
+ #[cfg(feature = "master")]
use rustc_hir as hir;
use rustc_middle::mir::BinOp;
use rustc_middle::span_bug;

@@ -72,11 +73,11 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
let expected_bytes = len / 8 + ((len % 8 > 0) as u64);

let mask_ty = arg_tys[0];
- let mut mask = match mask_ty.kind() {
+ let mut mask = match *mask_ty.kind() {
ty::Int(i) if i.bit_width() == Some(expected_int_bits) => args[0].immediate(),
ty::Uint(i) if i.bit_width() == Some(expected_int_bits) => args[0].immediate(),
ty::Array(elem, len)
- if matches!(elem.kind(), ty::Uint(ty::UintTy::U8))
+ if matches!(*elem.kind(), ty::Uint(ty::UintTy::U8))
&& len.try_eval_target_usize(bx.tcx, ty::ParamEnv::reveal_all())
== Some(expected_bytes) =>
{

@@ -309,10 +310,9 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
})
.collect();
return Ok(bx.context.new_rvalue_from_vector(None, v_type, &elems));
- } else {
- // avoid the unnecessary truncation as an optimization.
- return Ok(bx.context.new_bitcast(None, result, v_type));
}
+ // avoid the unnecessary truncation as an optimization.
+ return Ok(bx.context.new_bitcast(None, result, v_type));
}
// since gcc doesn't have vector shuffle methods available in non-patched builds, fallback to
// component-wise bitreverses if they're not available.

@@ -342,11 +342,13 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
.map(|i| {
let index = bx.context.new_rvalue_from_long(bx.i32_type, i as i64);
let value = bx.extract_element(vector, index).to_rvalue();
- if name == sym::simd_ctlz {
- bx.count_leading_zeroes(value.get_type().get_size() as u64 * 8, value)
+ let value_type = value.get_type();
+ let element = if name == sym::simd_ctlz {
+ bx.count_leading_zeroes(value_type.get_size() as u64 * 8, value)
} else {
- bx.count_trailing_zeroes(value.get_type().get_size() as u64 * 8, value)
- }
+ bx.count_trailing_zeroes(value_type.get_size() as u64 * 8, value)
+ };
+ bx.context.new_cast(None, element, value_type)
})
.collect();
return Ok(bx.context.new_rvalue_from_vector(None, vector.get_type(), &elements));

@@ -355,8 +357,8 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
if name == sym::simd_shuffle {
// Make sure this is actually an array, since typeck only checks the length-suffixed
// version of this intrinsic.
- let n: u64 = match args[2].layout.ty.kind() {
+ let n: u64 = match *args[2].layout.ty.kind() {
- ty::Array(ty, len) if matches!(ty.kind(), ty::Uint(ty::UintTy::U32)) => {
+ ty::Array(ty, len) if matches!(*ty.kind(), ty::Uint(ty::UintTy::U32)) => {
len.try_eval_target_usize(bx.cx.tcx, ty::ParamEnv::reveal_all()).unwrap_or_else(
|| span_bug!(span, "could not evaluate shuffle index array length"),
)

@@ -429,13 +431,148 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
m_len == v_len,
InvalidMonomorphization::MismatchedLengths { span, name, m_len, v_len }
);
- match m_elem_ty.kind() {
+ match *m_elem_ty.kind() {
ty::Int(_) => {}
_ => return_error!(InvalidMonomorphization::MaskType { span, name, ty: m_elem_ty }),
}
return Ok(bx.vector_select(args[0].immediate(), args[1].immediate(), args[2].immediate()));
}

+ if name == sym::simd_cast_ptr {
+ require_simd!(ret_ty, InvalidMonomorphization::SimdReturn { span, name, ty: ret_ty });
+ let (out_len, out_elem) = ret_ty.simd_size_and_type(bx.tcx());
+
+ require!(
+ in_len == out_len,
+ InvalidMonomorphization::ReturnLengthInputType {
+ span,
+ name,
+ in_len,
+ in_ty,
+ ret_ty,
+ out_len
+ }
+ );
+
+ match *in_elem.kind() {
+ ty::RawPtr(p_ty, _) => {
+ let metadata = p_ty.ptr_metadata_ty(bx.tcx, |ty| {
+ bx.tcx.normalize_erasing_regions(ty::ParamEnv::reveal_all(), ty)
+ });
+ require!(
+ metadata.is_unit(),
+ InvalidMonomorphization::CastFatPointer { span, name, ty: in_elem }
+ );
+ }
+ _ => {
+ return_error!(InvalidMonomorphization::ExpectedPointer { span, name, ty: in_elem })
+ }
+ }
+ match *out_elem.kind() {
+ ty::RawPtr(p_ty, _) => {
+ let metadata = p_ty.ptr_metadata_ty(bx.tcx, |ty| {
+ bx.tcx.normalize_erasing_regions(ty::ParamEnv::reveal_all(), ty)
+ });
+ require!(
+ metadata.is_unit(),
+ InvalidMonomorphization::CastFatPointer { span, name, ty: out_elem }
+ );
+ }
+ _ => {
+ return_error!(InvalidMonomorphization::ExpectedPointer { span, name, ty: out_elem })
+ }
+ }
+
+ let arg = args[0].immediate();
+ let elem_type = llret_ty.dyncast_vector().expect("vector return type").get_element_type();
+ let values: Vec<_> = (0..in_len)
+ .map(|i| {
+ let idx = bx.gcc_int(bx.usize_type, i as _);
+ let value = bx.extract_element(arg, idx);
+ bx.pointercast(value, elem_type)
+ })
+ .collect();
+ return Ok(bx.context.new_rvalue_from_vector(bx.location, llret_ty, &values));
+ }
+
+ if name == sym::simd_expose_provenance {
+ require_simd!(ret_ty, InvalidMonomorphization::SimdReturn { span, name, ty: ret_ty });
+ let (out_len, out_elem) = ret_ty.simd_size_and_type(bx.tcx());
+
+ require!(
+ in_len == out_len,
+ InvalidMonomorphization::ReturnLengthInputType {
+ span,
+ name,
+ in_len,
+ in_ty,
+ ret_ty,
+ out_len
+ }
+ );
+
+ match *in_elem.kind() {
+ ty::RawPtr(_, _) => {}
+ _ => {
+ return_error!(InvalidMonomorphization::ExpectedPointer { span, name, ty: in_elem })
+ }
+ }
+ match *out_elem.kind() {
+ ty::Uint(ty::UintTy::Usize) => {}
+ _ => return_error!(InvalidMonomorphization::ExpectedUsize { span, name, ty: out_elem }),
+ }
+
+ let arg = args[0].immediate();
+ let elem_type = llret_ty.dyncast_vector().expect("vector return type").get_element_type();
+ let values: Vec<_> = (0..in_len)
+ .map(|i| {
+ let idx = bx.gcc_int(bx.usize_type, i as _);
+ let value = bx.extract_element(arg, idx);
+ bx.ptrtoint(value, elem_type)
+ })
+ .collect();
+ return Ok(bx.context.new_rvalue_from_vector(bx.location, llret_ty, &values));
+ }
+
+ if name == sym::simd_with_exposed_provenance {
+ require_simd!(ret_ty, InvalidMonomorphization::SimdReturn { span, name, ty: ret_ty });
+ let (out_len, out_elem) = ret_ty.simd_size_and_type(bx.tcx());
+
+ require!(
+ in_len == out_len,
+ InvalidMonomorphization::ReturnLengthInputType {
+ span,
+ name,
+ in_len,
+ in_ty,
+ ret_ty,
+ out_len
+ }
+ );
+
+ match *in_elem.kind() {
+ ty::Uint(ty::UintTy::Usize) => {}
+ _ => return_error!(InvalidMonomorphization::ExpectedUsize { span, name, ty: in_elem }),
+ }
+ match *out_elem.kind() {
+ ty::RawPtr(_, _) => {}
+ _ => {
+ return_error!(InvalidMonomorphization::ExpectedPointer { span, name, ty: out_elem })
+ }
+ }
+
+ let arg = args[0].immediate();
+ let elem_type = llret_ty.dyncast_vector().expect("vector return type").get_element_type();
+ let values: Vec<_> = (0..in_len)
+ .map(|i| {
+ let idx = bx.gcc_int(bx.usize_type, i as _);
+ let value = bx.extract_element(arg, idx);
+ bx.inttoptr(value, elem_type)
+ })
+ .collect();
+ return Ok(bx.context.new_rvalue_from_vector(bx.location, llret_ty, &values));
+ }
+
#[cfg(feature = "master")]
if name == sym::simd_cast || name == sym::simd_as {
require_simd!(ret_ty, InvalidMonomorphization::SimdReturn { span, name, ty: ret_ty });

@@ -462,13 +599,13 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
Unsupported,
}

- let in_style = match in_elem.kind() {
+ let in_style = match *in_elem.kind() {
ty::Int(_) | ty::Uint(_) => Style::Int,
ty::Float(_) => Style::Float,
_ => Style::Unsupported,
};

- let out_style = match out_elem.kind() {
+ let out_style = match *out_elem.kind() {
ty::Int(_) | ty::Uint(_) => Style::Int,
ty::Float(_) => Style::Float,
_ => Style::Unsupported,

@@ -495,7 +632,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
macro_rules! arith_binary {
($($name: ident: $($($p: ident),* => $call: ident),*;)*) => {
$(if name == sym::$name {
- match in_elem.kind() {
+ match *in_elem.kind() {
$($(ty::$p(_))|* => {
return Ok(bx.$call(args[0].immediate(), args[1].immediate()))
})*

@@ -533,7 +670,6 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
let sign_shift = bx.context.new_rvalue_from_int(elem_type, elem_size as i32 - 1);
let one = bx.context.new_rvalue_one(elem_type);

- let mut shift = 0;
for i in 0..in_len {
let elem =
bx.extract_element(vector, bx.context.new_rvalue_from_int(bx.int_type, i as i32));

@@ -541,17 +677,16 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
let masked = shifted & one;
result = result
| (bx.context.new_cast(None, masked, result_type)
- << bx.context.new_rvalue_from_int(result_type, shift));
- shift += 1;
+ << bx.context.new_rvalue_from_int(result_type, i as i32));
}

- match ret_ty.kind() {
+ match *ret_ty.kind() {
ty::Uint(i) if i.bit_width() == Some(expected_int_bits) => {
// Zero-extend iN to the bitmask type:
return Ok(result);
}
ty::Array(elem, len)
- if matches!(elem.kind(), ty::Uint(ty::UintTy::U8))
+ if matches!(*elem.kind(), ty::Uint(ty::UintTy::U8))
&& len.try_eval_target_usize(bx.tcx, ty::ParamEnv::reveal_all())
== Some(expected_bytes) =>
{

@@ -590,7 +725,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
return Err(());
}};
}
- let (elem_ty_str, elem_ty) = if let ty::Float(f) = in_elem.kind() {
+ let (elem_ty_str, elem_ty) = if let ty::Float(ref f) = *in_elem.kind() {
let elem_ty = bx.cx.type_float_from_ty(*f);
match f.bit_width() {
32 => ("f", elem_ty),

@@ -816,7 +951,9 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
let (_, element_ty0) = arg_tys[0].simd_size_and_type(bx.tcx());
let (_, element_ty1) = arg_tys[1].simd_size_and_type(bx.tcx());
let (pointer_count, underlying_ty) = match *element_ty1.kind() {
- ty::RawPtr(p_ty, _) if p_ty == in_elem => (ptr_count(element_ty1), non_ptr(element_ty1)),
+ ty::RawPtr(p_ty, _) if p_ty == in_elem => {
+ (ptr_count(element_ty1), non_ptr(element_ty1))
+ }
_ => {
require!(
false,

@@ -839,7 +976,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(

// The element type of the third argument must be a signed integer type of any width:
let (_, element_ty2) = arg_tys[2].simd_size_and_type(bx.tcx());
- match element_ty2.kind() {
+ match *element_ty2.kind() {
ty::Int(_) => (),
_ => {
require!(

@@ -955,7 +1092,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
assert_eq!(underlying_ty, non_ptr(element_ty0));

// The element type of the third argument must be a signed integer type of any width:
- match element_ty2.kind() {
+ match *element_ty2.kind() {
ty::Int(_) => (),
_ => {
require!(

@@ -1013,7 +1150,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
macro_rules! arith_unary {
($($name: ident: $($($p: ident),* => $call: ident),*;)*) => {
$(if name == sym::$name {
- match in_elem.kind() {
+ match *in_elem.kind() {
$($(ty::$p(_))|* => {
return Ok(bx.$call(args[0].immediate()))
})*

@@ -1137,7 +1274,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
ret_ty == in_elem,
InvalidMonomorphization::ReturnType { span, name, in_elem, in_ty, ret_ty }
);
- return match in_elem.kind() {
+ return match *in_elem.kind() {
ty::Int(_) | ty::Uint(_) => {
let r = bx.vector_reduce_op(args[0].immediate(), $vec_op);
if $ordered {

@@ -1206,7 +1343,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
ret_ty == in_elem,
InvalidMonomorphization::ReturnType { span, name, in_elem, in_ty, ret_ty }
);
- return match in_elem.kind() {
+ return match *in_elem.kind() {
ty::Int(_) | ty::Uint(_) => Ok(bx.$int_red(args[0].immediate())),
ty::Float(_) => Ok(bx.$float_red(args[0].immediate())),
_ => return_error!(InvalidMonomorphization::UnsupportedSymbol {

@@ -1235,7 +1372,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
);
args[0].immediate()
} else {
- match in_elem.kind() {
+ match *in_elem.kind() {
ty::Int(_) | ty::Uint(_) => {}
_ => return_error!(InvalidMonomorphization::UnsupportedSymbol {
span,

@@ -1249,7 +1386,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(

args[0].immediate()
};
- return match in_elem.kind() {
+ return match *in_elem.kind() {
ty::Int(_) | ty::Uint(_) => {
let r = bx.vector_reduce_op(input, $op);
Ok(if !$boolean {
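// Illustrative sketch (not part of the patch): the scalar equivalent of what the new
// simd_cast_ptr, simd_expose_provenance and simd_with_exposed_provenance arms above do
// for each vector lane: cast a pointer to another pointer type, expose it as a usize
// address, and rebuild a pointer from an address.
fn main() {
    let values = [1u8, 2, 3, 4];
    let ptrs: Vec<*const u8> = values.iter().map(|v| v as *const u8).collect();

    // simd_cast_ptr: lane-wise pointer-to-pointer cast.
    let as_unit: Vec<*const ()> = ptrs.iter().map(|&p| p as *const ()).collect();

    // simd_expose_provenance: lane-wise pointer-to-usize.
    let addrs: Vec<usize> = ptrs.iter().map(|&p| p as usize).collect();

    // simd_with_exposed_provenance: lane-wise usize-to-pointer.
    let back: Vec<*const u8> = addrs.iter().map(|&a| a as *const u8).collect();

    assert_eq!((as_unit.len(), addrs.len(), back.len()), (4, 4, 4));
}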
@@ -4,7 +4,7 @@
* TODO(antoyo): support LTO (gcc's equivalent to Full LTO is -flto -flto-partition=one — https://documentation.suse.com/sbp/all/html/SBP-GCC-10/index.html).
* For Thin LTO, this might be helpful:
* In gcc 4.6 -fwhopr was removed and became default with -flto. The non-whopr path can still be executed via -flto-partition=none.
- * Or the new incremental LTO?
+ * Or the new incremental LTO (https://www.phoronix.com/news/GCC-Incremental-LTO-Patches)?
*
* Maybe some missing optizations enabled by rustc's LTO is in there: https://gcc.gnu.org/onlinedocs/gcc/Optimize-Options.html
* Like -fipa-icf (should be already enabled) and maybe -fdevirtualize-at-ltrans.

@@ -16,12 +16,13 @@
#![allow(internal_features)]
#![doc(rust_logo)]
#![feature(rustdoc_internals)]
- #![feature(rustc_private, decl_macro, never_type, trusted_len, hash_raw_entry)]
+ #![feature(rustc_private, decl_macro, never_type, trusted_len, hash_raw_entry, let_chains)]
#![allow(broken_intra_doc_links)]
#![recursion_limit = "256"]
#![warn(rust_2018_idioms)]
#![warn(unused_lifetimes)]
#![deny(clippy::pattern_type_mismatch)]
+ #![allow(clippy::needless_lifetimes)]

extern crate rustc_apfloat;
extern crate rustc_ast;

@@ -73,6 +74,7 @@ mod type_of;

use std::any::Any;
use std::fmt::Debug;
+ use std::ops::Deref;
#[cfg(not(feature = "master"))]
use std::sync::atomic::AtomicBool;
#[cfg(not(feature = "master"))]

@@ -80,8 +82,9 @@ use std::sync::atomic::Ordering;
use std::sync::Arc;
use std::sync::Mutex;

+ use back::lto::ThinBuffer;
+ use back::lto::ThinData;
use errors::LTONotSupported;
- #[cfg(not(feature = "master"))]
use gccjit::CType;
use gccjit::{Context, OptimizationLevel};
#[cfg(feature = "master")]

@@ -92,9 +95,7 @@ use rustc_codegen_ssa::back::write::{
CodegenContext, FatLtoInput, ModuleConfig, TargetMachineFactoryFn,
};
use rustc_codegen_ssa::base::codegen_crate;
- use rustc_codegen_ssa::traits::{
- CodegenBackend, ExtraBackendMethods, ThinBufferMethods, WriteBackendMethods,
- };
+ use rustc_codegen_ssa::traits::{CodegenBackend, ExtraBackendMethods, WriteBackendMethods};
use rustc_codegen_ssa::{CodegenResults, CompiledModule, ModuleCodegen};
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::sync::IntoDynSyncSend;

@@ -139,6 +140,10 @@ impl TargetInfo {
fn supports_128bit_int(&self) -> bool {
self.supports_128bit_integers.load(Ordering::SeqCst)
}
+
+ fn supports_target_dependent_type(&self, _typ: CType) -> bool {
+ false
+ }
}

#[derive(Clone)]

@@ -160,6 +165,10 @@ impl LockedTargetInfo {
fn supports_128bit_int(&self) -> bool {
self.info.lock().expect("lock").supports_128bit_int()
}
+
+ fn supports_target_dependent_type(&self, typ: CType) -> bool {
+ self.info.lock().expect("lock").supports_target_dependent_type(typ)
+ }
}

#[derive(Clone)]

@@ -188,6 +197,7 @@ impl CodegenBackend for GccCodegenBackend {

#[cfg(feature = "master")]
gccjit::set_global_personality_function_name(b"rust_eh_personality\0");

if sess.lto() == Lto::Thin {
sess.dcx().emit_warn(LTONotSupported {});
}

@@ -293,7 +303,7 @@ impl ExtraBackendMethods for GccCodegenBackend {
alloc_error_handler_kind: AllocatorKind,
) -> Self::Module {
let mut mods = GccContext {
- context: new_context(tcx),
+ context: Arc::new(SyncContext::new(new_context(tcx))),
should_combine_object_files: false,
temp_dir: None,
};

@@ -323,35 +333,42 @@ impl ExtraBackendMethods for GccCodegenBackend {
}
}

- pub struct ThinBuffer;
-
- impl ThinBufferMethods for ThinBuffer {
- fn data(&self) -> &[u8] {
- unimplemented!();
- }
-
- fn thin_link_data(&self) -> &[u8] {
- unimplemented!();
- }
- }
-
pub struct GccContext {
- context: Context<'static>,
+ context: Arc<SyncContext>,
should_combine_object_files: bool,
// Temporary directory used by LTO. We keep it here so that it's not removed before linking.
temp_dir: Option<TempDir>,
}

- unsafe impl Send for GccContext {}
- // FIXME(antoyo): that shouldn't be Sync. Parallel compilation is currently disabled with "-Zno-parallel-llvm". Try to disable it here.
- unsafe impl Sync for GccContext {}
+ struct SyncContext {
+ context: Context<'static>,
+ }
+
+ impl SyncContext {
+ fn new(context: Context<'static>) -> Self {
+ Self { context }
+ }
+ }
+
+ impl Deref for SyncContext {
+ type Target = Context<'static>;
+
+ fn deref(&self) -> &Self::Target {
+ &self.context
+ }
+ }
+
+ unsafe impl Send for SyncContext {}
+ // FIXME(antoyo): that shouldn't be Sync. Parallel compilation is currently disabled with "-Zno-parallel-llvm".
+ // TODO: disable it here by returing false in CodegenBackend::supports_parallel().
+ unsafe impl Sync for SyncContext {}

impl WriteBackendMethods for GccCodegenBackend {
type Module = GccContext;
type TargetMachine = ();
type TargetMachineError = ();
type ModuleBuffer = ModuleBuffer;
- type ThinData = ();
+ type ThinData = ThinData;
type ThinBuffer = ThinBuffer;

fn run_fat_lto(

@@ -363,11 +380,11 @@ impl WriteBackendMethods for GccCodegenBackend {
}

fn run_thin_lto(
- _cgcx: &CodegenContext<Self>,
- _modules: Vec<(String, Self::ThinBuffer)>,
- _cached_modules: Vec<(SerializedModule<Self::ModuleBuffer>, WorkProduct)>,
+ cgcx: &CodegenContext<Self>,
+ modules: Vec<(String, Self::ThinBuffer)>,
+ cached_modules: Vec<(SerializedModule<Self::ModuleBuffer>, WorkProduct)>,
) -> Result<(Vec<LtoModuleCodegen<Self>>, Vec<WorkProduct>), FatalError> {
- unimplemented!();
+ back::lto::run_thin(cgcx, modules, cached_modules)
}

fn print_pass_timings(&self) {

@@ -397,10 +414,10 @@ impl WriteBackendMethods for GccCodegenBackend {
}

unsafe fn optimize_thin(
- _cgcx: &CodegenContext<Self>,
- _thin: ThinModule<Self>,
+ cgcx: &CodegenContext<Self>,
+ thin: ThinModule<Self>,
) -> Result<ModuleCodegen<Self::Module>, FatalError> {
- unimplemented!();
+ back::lto::optimize_thin_module(thin, cgcx)
}

unsafe fn codegen(

@@ -413,10 +430,10 @@ impl WriteBackendMethods for GccCodegenBackend {
}

fn prepare_thin(
- _module: ModuleCodegen<Self::Module>,
- _emit_summary: bool,
+ module: ModuleCodegen<Self::Module>,
+ emit_summary: bool,
) -> (String, Self::ThinBuffer) {
- unimplemented!();
+ back::lto::prepare_thin(module, emit_summary)
}

fn serialize_module(_module: ModuleCodegen<Self::Module>) -> (String, Self::ModuleBuffer) {

@@ -437,7 +454,8 @@ impl WriteBackendMethods for GccCodegenBackend {
pub fn __rustc_codegen_backend() -> Box<dyn CodegenBackend> {
#[cfg(feature = "master")]
let info = {
- // Check whether the target supports 128-bit integers.
+ // Check whether the target supports 128-bit integers, and sized floating point types (like
+ // Float16).
let context = Context::default();
Arc::new(Mutex::new(IntoDynSyncSend(context.get_target_info())))
};

@@ -467,6 +485,7 @@ pub fn target_features(
allow_unstable: bool,
target_info: &LockedTargetInfo,
) -> Vec<Symbol> {
+ // TODO(antoyo): use global_gcc_features.
sess.target
.supported_target_features()
.iter()

@@ -477,8 +496,12 @@ pub fn target_features(
None
}
})
- .filter(|_feature| {
- target_info.cpu_supports(_feature)
+ .filter(|feature| {
+ // TODO: we disable Neon for now since we don't support the LLVM intrinsics for it.
+ if *feature == "neon" {
+ return false;
+ }
+ target_info.cpu_supports(feature)
/*
adx, aes, avx, avx2, avx512bf16, avx512bitalg, avx512bw, avx512cd, avx512dq, avx512er, avx512f, avx512fp16, avx512ifma,
avx512pf, avx512vbmi, avx512vbmi2, avx512vl, avx512vnni, avx512vp2intersect, avx512vpopcntdq,
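// Illustrative sketch (not part of the patch): the newtype-plus-Deref pattern used by
// SyncContext in the lib.rs hunks above, with a stand-in RawContext type instead of
// gccjit::Context. The unsafe Send/Sync impls are assumptions made for illustration;
// in the backend they are only tolerable because parallel codegen stays disabled.
use std::ops::Deref;

struct RawContext(*mut ());

struct SyncHandle {
    raw: RawContext,
}

impl SyncHandle {
    fn new(raw: RawContext) -> Self {
        Self { raw }
    }
}

impl Deref for SyncHandle {
    type Target = RawContext;

    fn deref(&self) -> &Self::Target {
        &self.raw
    }
}

unsafe impl Send for SyncHandle {}
unsafe impl Sync for SyncHandle {}

fn main() {
    let handle = SyncHandle::new(RawContext(std::ptr::null_mut()));
    let _inner: &RawContext = &handle;
}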
@@ -81,6 +81,6 @@ impl<'gcc, 'tcx> PreDefineMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
// TODO(antoyo): use inline attribute from there in linkage.set() above.

self.functions.borrow_mut().insert(symbol_name.to_string(), decl);
- self.function_instances.borrow_mut().insert(instance, unsafe { std::mem::transmute(decl) });
+ self.function_instances.borrow_mut().insert(instance, decl);
}
}
@@ -1,3 +1,8 @@
+ #[cfg(feature = "master")]
+ use std::convert::TryInto;
+
+ #[cfg(feature = "master")]
+ use gccjit::CType;
use gccjit::{RValue, Struct, Type};
use rustc_codegen_ssa::common::TypeKind;
use rustc_codegen_ssa::traits::{BaseTypeMethods, DerivedTypeMethods, TypeMembershipMethods};

@@ -142,25 +147,76 @@ impl<'gcc, 'tcx> BaseTypeMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
}

fn type_f16(&self) -> Type<'gcc> {
- unimplemented!("f16_f128")
+ #[cfg(feature = "master")]
+ if self.supports_f16_type {
+ return self.context.new_c_type(CType::Float16);
+ }
+ bug!("unsupported float width 16")
}

fn type_f32(&self) -> Type<'gcc> {
+ #[cfg(feature = "master")]
+ if self.supports_f32_type {
+ return self.context.new_c_type(CType::Float32);
+ }
self.float_type
}

fn type_f64(&self) -> Type<'gcc> {
+ #[cfg(feature = "master")]
+ if self.supports_f64_type {
+ return self.context.new_c_type(CType::Float64);
+ }
self.double_type
}

fn type_f128(&self) -> Type<'gcc> {
- unimplemented!("f16_f128")
+ #[cfg(feature = "master")]
+ if self.supports_f128_type {
+ return self.context.new_c_type(CType::Float128);
+ }
+ bug!("unsupported float width 128")
}

fn type_func(&self, params: &[Type<'gcc>], return_type: Type<'gcc>) -> Type<'gcc> {
self.context.new_function_pointer_type(None, return_type, params, false)
}

+ #[cfg(feature = "master")]
+ fn type_kind(&self, typ: Type<'gcc>) -> TypeKind {
+ if self.is_int_type_or_bool(typ) {
+ TypeKind::Integer
+ } else if typ.get_pointee().is_some() {
+ TypeKind::Pointer
+ } else if typ.is_vector() {
+ TypeKind::Vector
+ } else if typ.dyncast_array().is_some() {
+ TypeKind::Array
+ } else if typ.is_struct().is_some() {
+ TypeKind::Struct
+ } else if typ.dyncast_function_ptr_type().is_some() {
+ TypeKind::Function
+ } else if typ.is_compatible_with(self.float_type) {
+ TypeKind::Float
+ } else if typ.is_compatible_with(self.double_type) {
+ TypeKind::Double
+ } else if typ.is_floating_point() {
+ match typ.get_size() {
+ 2 => TypeKind::Half,
+ 4 => TypeKind::Float,
+ 8 => TypeKind::Double,
+ 16 => TypeKind::FP128,
+ size => unreachable!("Floating-point type of size {}", size),
+ }
+ } else if typ == self.type_void() {
+ TypeKind::Void
+ } else {
+ // TODO(antoyo): support other types.
+ unimplemented!();
+ }
+ }
+
+ #[cfg(not(feature = "master"))]
fn type_kind(&self, typ: Type<'gcc>) -> TypeKind {
if self.is_int_type_or_bool(typ) {
TypeKind::Integer

@@ -170,9 +226,19 @@ impl<'gcc, 'tcx> BaseTypeMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
TypeKind::Double
} else if typ.is_vector() {
TypeKind::Vector
+ } else if typ.get_pointee().is_some() {
+ TypeKind::Pointer
+ } else if typ.dyncast_array().is_some() {
+ TypeKind::Array
+ } else if typ.is_struct().is_some() {
+ TypeKind::Struct
+ } else if typ.dyncast_function_ptr_type().is_some() {
+ TypeKind::Function
+ } else if typ == self.type_void() {
+ TypeKind::Void
} else {
// TODO(antoyo): support other types.
- TypeKind::Void
+ unimplemented!();
}
}

@@ -200,6 +266,16 @@ impl<'gcc, 'tcx> BaseTypeMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
unimplemented!();
}

+ #[cfg(feature = "master")]
+ fn float_width(&self, typ: Type<'gcc>) -> usize {
+ if typ.is_floating_point() {
+ (typ.get_size() * u8::BITS).try_into().unwrap()
+ } else {
+ panic!("Cannot get width of float type {:?}", typ);
+ }
+ }
+
+ #[cfg(not(feature = "master"))]
fn float_width(&self, typ: Type<'gcc>) -> usize {
let f32 = self.context.new_type::<f32>();
let f64 = self.context.new_type::<f64>();
@@ -8,7 +8,7 @@ use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_middle::ty::{self, CoroutineArgsExt, Ty, TypeVisitableExt};
use rustc_target::abi::call::{CastTarget, FnAbi, Reg};
use rustc_target::abi::{
- self, Abi, Align, FieldsShape, Float, Int, Integer, PointeeInfo, Pointer, Size, TyAbiInterface,
+ self, Abi, FieldsShape, Float, Int, Integer, PointeeInfo, Pointer, Size, TyAbiInterface,
Variants,
};

@@ -53,12 +53,6 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
}
}

- impl<'a, 'tcx> CodegenCx<'a, 'tcx> {
- pub fn align_of(&self, ty: Ty<'tcx>) -> Align {
- self.layout_of(ty).align.abi
- }
- }
-
fn uncached_gcc_type<'gcc, 'tcx>(
cx: &CodegenCx<'gcc, 'tcx>,
layout: TyAndLayout<'tcx>,

@@ -90,7 +84,7 @@ fn uncached_gcc_type<'gcc, 'tcx>(
Abi::Uninhabited | Abi::Aggregate { .. } => {}
}

- let name = match layout.ty.kind() {
+ let name = match *layout.ty.kind() {
// FIXME(eddyb) producing readable type names for trait objects can result
// in problematically distinct types due to HRTB and subtyping (see #47638).
// ty::Dynamic(..) |

@@ -220,7 +214,7 @@ impl<'tcx> LayoutGccExt<'tcx> for TyAndLayout<'tcx> {
// to fn_ptr_backend_type handle the on-stack attribute.
// TODO(antoyo): find a less hackish way to hande the on-stack attribute.
ty::FnPtr(sig) => {
- cx.fn_ptr_backend_type(&cx.fn_abi_of_fn_ptr(sig, ty::List::empty()))
+ cx.fn_ptr_backend_type(cx.fn_abi_of_fn_ptr(sig, ty::List::empty()))
}
_ => self.scalar_gcc_type_at(cx, scalar, Size::ZERO),
};
@@ -0,0 +1,26 @@
+ {
+ "arch": "m68k",
+ "cpu": "M68020",
+ "crt-static-respected": true,
+ "data-layout": "E-m:e-p:32:16:32-i8:8:8-i16:16:16-i32:16:32-n8:16:32-a:0:16-S16",
+ "dynamic-linking": true,
+ "env": "gnu",
+ "has-rpath": true,
+ "has-thread-local": true,
+ "llvm-target": "m68k-unknown-linux-gnu",
+ "max-atomic-width": 32,
+ "os": "linux",
+ "position-independent-executables": true,
+ "relro-level": "full",
+ "supported-split-debuginfo": [
+ "packed",
+ "unpacked",
+ "off"
+ ],
+ "target-endian": "big",
+ "target-family": [
+ "unix"
+ ],
+ "target-mcount": "_mcount",
+ "target-pointer-width": "32"
+ }
compiler/rustc_codegen_gcc/tests/failing-ice-tests.txt (new file, 36 lines)
@@ -0,0 +1,36 @@
+ tests/ui/treat-err-as-bug/span_delayed_bug.rs
+ tests/ui/treat-err-as-bug/err.rs
+ tests/ui/simd/not-out-of-bounds.rs
+ tests/ui/simd/monomorphize-shuffle-index.rs
+ tests/ui/simd/masked-load-store-build-fail.rs
+ tests/ui/simd/intrinsic/generic-shuffle.rs
+ tests/ui/simd/intrinsic/generic-elements.rs
+ tests/ui/simd/intrinsic/generic-cast.rs
+ tests/ui/simd/intrinsic/generic-arithmetic-saturating-2.rs
+ tests/ui/simd/intrinsic/generic-arithmetic-2.rs
+ tests/ui/panics/default-backtrace-ice.rs
+ tests/ui/mir/lint/storage-live.rs
+ tests/ui/layout/valid_range_oob.rs
+ tests/ui/higher-ranked/trait-bounds/future.rs
+ tests/ui/consts/const-eval/const-eval-query-stack.rs
+ tests/ui/simd/masked-load-store.rs
+ tests/ui/simd/issue-39720.rs
+ tests/ui/simd/intrinsic/ptr-cast.rs
+ tests/ui/sepcomp/sepcomp-statics.rs
+ tests/ui/sepcomp/sepcomp-fns.rs
+ tests/ui/sepcomp/sepcomp-fns-backwards.rs
+ tests/ui/sepcomp/sepcomp-extern.rs
+ tests/ui/sepcomp/sepcomp-cci.rs
+ tests/ui/lto/thin-lto-inlines2.rs
+ tests/ui/lto/weak-works.rs
+ tests/ui/lto/thin-lto-inlines.rs
+ tests/ui/lto/thin-lto-global-allocator.rs
+ tests/ui/lto/msvc-imp-present.rs
+ tests/ui/lto/dylib-works.rs
+ tests/ui/lto/all-crates.rs
+ tests/ui/issues/issue-47364.rs
+ tests/ui/functions-closures/parallel-codegen-closures.rs
+ tests/ui/sepcomp/sepcomp-unwind.rs
+ tests/ui/extern/issue-64655-extern-rust-must-allow-unwind.rs
+ tests/ui/extern/issue-64655-allow-unwind-when-calling-panic-directly.rs
+ tests/ui/unwind-no-uwtable.rs
@@ -30,3 +30,4 @@ tests/ui/macros/rfc-2011-nicer-assert-messages/feature-gate-generic_assert.rs
tests/ui/macros/stringify.rs
tests/ui/reexport-test-harness-main.rs
tests/ui/rfcs/rfc-1937-termination-trait/termination-trait-in-test.rs
+tests/ui/binding/fn-arg-incomplete-pattern-drop-order.rs
compiler/rustc_codegen_gcc/tests/failing-run-make-tests.txt (new file, 42 lines)
@@ -0,0 +1,42 @@
+tests/run-make/a-b-a-linker-guard/
+tests/run-make/CURRENT_RUSTC_VERSION/
+tests/run-make/cross-lang-lto/
+tests/run-make/cross-lang-lto-upstream-rlibs/
+tests/run-make/doctests-keep-binaries/
+tests/run-make/doctests-runtool/
+tests/run-make/emit-shared-files/
+tests/run-make/exit-code/
+tests/run-make/issue-22131/
+tests/run-make/issue-64153/
+tests/run-make/llvm-ident/
+tests/run-make/native-link-modifier-bundle/
+tests/run-make/remap-path-prefix-dwarf/
+tests/run-make/repr128-dwarf/
+tests/run-make/rlib-format-packed-bundled-libs/
+tests/run-make/rlib-format-packed-bundled-libs-2/
+tests/run-make/rustdoc-determinism/
+tests/run-make/rustdoc-error-lines/
+tests/run-make/rustdoc-map-file/
+tests/run-make/rustdoc-output-path/
+tests/run-make/rustdoc-scrape-examples-invalid-expr/
+tests/run-make/rustdoc-scrape-examples-multiple/
+tests/run-make/rustdoc-scrape-examples-ordering/
+tests/run-make/rustdoc-scrape-examples-remap/
+tests/run-make/rustdoc-scrape-examples-test/
+tests/run-make/rustdoc-scrape-examples-whitespace/
+tests/run-make/rustdoc-scrape-examples-macros/
+tests/run-make/rustdoc-with-out-dir-option/
+tests/run-make/rustdoc-verify-output-files/
+tests/run-make/rustdoc-themes/
+tests/run-make/rustdoc-with-short-out-dir-option/
+tests/run-make/rustdoc-with-output-option/
+tests/run-make/arguments-non-c-like-enum/
+tests/run-make/c-link-to-rust-staticlib/
+tests/run-make/foreign-double-unwind/
+tests/run-make/foreign-exceptions/
+tests/run-make/glibc-staticlib-args/
+tests/run-make/issue-36710/
+tests/run-make/issue-68794-textrel-on-minimal-lib/
+tests/run-make/lto-smoke-c/
+tests/run-make/return-non-c-like-enum/
@@ -2,7 +2,6 @@ tests/ui/allocator/no_std-alloc-error-handler-custom.rs
tests/ui/allocator/no_std-alloc-error-handler-default.rs
tests/ui/asm/may_unwind.rs
tests/ui/asm/x86_64/multiple-clobber-abi.rs
-tests/ui/debuginfo/debuginfo-emit-llvm-ir-and-split-debuginfo.rs
tests/ui/functions-closures/parallel-codegen-closures.rs
tests/ui/linkage-attr/linkage1.rs
tests/ui/lto/dylib-works.rs
@@ -14,13 +13,12 @@ tests/ui/sepcomp/sepcomp-fns-backwards.rs
tests/ui/sepcomp/sepcomp-fns.rs
tests/ui/sepcomp/sepcomp-statics.rs
tests/ui/asm/x86_64/may_unwind.rs
-tests/ui/backtrace.rs
tests/ui/catch-unwind-bang.rs
-tests/ui/cfg/cfg-panic-abort.rs
tests/ui/drop/dynamic-drop-async.rs
+tests/ui/cfg/cfg-panic-abort.rs
tests/ui/drop/repeat-drop.rs
-tests/ui/fmt/format-args-capture.rs
tests/ui/coroutine/panic-drops-resume.rs
+tests/ui/fmt/format-args-capture.rs
tests/ui/coroutine/panic-drops.rs
tests/ui/intrinsics/panic-uninitialized-zeroed.rs
tests/ui/iterators/iter-sum-overflow-debug.rs
@@ -34,12 +32,8 @@ tests/ui/panic-runtime/abort.rs
tests/ui/panic-runtime/link-to-abort.rs
tests/ui/unwind-no-uwtable.rs
tests/ui/parser/unclosed-delimiter-in-dep.rs
-tests/ui/runtime/rt-explody-panic-payloads.rs
-tests/ui/simd/intrinsic/ptr-cast.rs
-tests/ui/binding/fn-arg-incomplete-pattern-drop-order.rs
tests/ui/consts/missing_span_in_backtrace.rs
tests/ui/drop/dynamic-drop.rs
-tests/ui/dyn-star/box.rs
tests/ui/issues/issue-40883.rs
tests/ui/issues/issue-43853.rs
tests/ui/issues/issue-47364.rs
@@ -48,29 +42,56 @@ tests/ui/rfcs/rfc-1857-stabilize-drop-order/drop-order.rs
tests/ui/rfcs/rfc-2091-track-caller/std-panic-locations.rs
tests/ui/simd/issue-17170.rs
tests/ui/simd/issue-39720.rs
-tests/ui/statics/issue-91050-1.rs
-tests/ui/statics/issue-91050-2.rs
tests/ui/alloc-error/default-alloc-error-hook.rs
tests/ui/coroutine/panic-safe.rs
tests/ui/issues/issue-14875.rs
tests/ui/issues/issue-29948.rs
tests/ui/panics/nested_panic_caught.rs
-tests/ui/const_prop/ice-issue-111353.rs
tests/ui/process/println-with-broken-pipe.rs
-tests/ui/panic-runtime/lto-abort.rs
tests/ui/lto/thin-lto-inlines2.rs
tests/ui/lto/weak-works.rs
+tests/ui/panic-runtime/lto-abort.rs
tests/ui/lto/thin-lto-inlines.rs
tests/ui/lto/thin-lto-global-allocator.rs
tests/ui/lto/msvc-imp-present.rs
tests/ui/lto/lto-thin-rustc-loads-linker-plugin.rs
tests/ui/lto/all-crates.rs
tests/ui/async-await/deep-futures-are-freeze.rs
-tests/ui/closures/capture-unsized-by-ref.rs
tests/ui/coroutine/resume-after-return.rs
-tests/ui/macros/rfc-2011-nicer-assert-messages/all-expr-kinds.rs
tests/ui/simd/masked-load-store.rs
tests/ui/simd/repr_packed.rs
tests/ui/async-await/in-trait/dont-project-to-specializable-projection.rs
tests/ui/consts/try-operator.rs
tests/ui/coroutine/unwind-abort-mix.rs
+tests/ui/type-alias-impl-trait/rpit_tait_equality_in_canonical_query.rs
+tests/ui/impl-trait/equality-in-canonical-query.rs
+tests/ui/consts/issue-miri-1910.rs
+tests/ui/mir/mir_heavy_promoted.rs
+tests/ui/consts/const_cmp_type_id.rs
+tests/ui/consts/issue-73976-monomorphic.rs
+tests/ui/consts/issue-94675.rs
+tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop-fail.rs
+tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop.rs
+tests/ui/runtime/on-broken-pipe/child-processes.rs
+tests/ui/sanitizer/cfi-assoc-ty-lifetime-issue-123053.rs
+tests/ui/sanitizer/cfi-async-closures.rs
+tests/ui/sanitizer/cfi-closures.rs
+tests/ui/sanitizer/cfi-complex-receiver.rs
+tests/ui/sanitizer/cfi-coroutine.rs
+tests/ui/sanitizer/cfi-drop-in-place.rs
+tests/ui/sanitizer/cfi-drop-no-principal.rs
+tests/ui/sanitizer/cfi-fn-ptr.rs
+tests/ui/sanitizer/cfi-self-ref.rs
+tests/ui/sanitizer/cfi-supertraits.rs
+tests/ui/sanitizer/cfi-virtual-auto.rs
+tests/ui/sanitizer/kcfi-mangling.rs
+tests/ui/statics/const_generics.rs
+tests/ui/backtrace/dylib-dep.rs
+tests/ui/errors/pic-linker.rs
+tests/ui/delegation/fn-header.rs
+tests/ui/consts/zst_no_llvm_alloc.rs
+tests/ui/consts/const-eval/parse_ints.rs
+tests/ui/simd/intrinsic/generic-arithmetic-pass.rs
+tests/ui/backtrace/backtrace.rs
+tests/ui/lifetimes/tail-expr-lock-poisoning.rs
+tests/ui/runtime/rt-explody-panic-payloads.rs
compiler/rustc_codegen_gcc/tests/hello-world/Cargo.toml (new file, 4 lines)
@@ -0,0 +1,4 @@
+[package]
+name = "hello_world"
+
+[dependencies]
compiler/rustc_codegen_gcc/tests/hello-world/src/main.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
+fn main() {
+    println!("Hello, world!");
+}
@@ -80,8 +80,7 @@ pub fn main_inner(profile: Profile) {
    compiler.args([
        &format!("-Zcodegen-backend={}/target/debug/librustc_codegen_gcc.so", current_dir),
        "--sysroot",
-        &format!("{}/build_sysroot/sysroot/", current_dir),
-        "-Zno-parallel-llvm",
+        &format!("{}/build/build_sysroot/sysroot/", current_dir),
        "-C",
        "link-arg=-lc",
        "-o",
@@ -205,6 +205,17 @@ impl Sub for i16 {
    }
}

+#[track_caller]
+#[lang = "panic_const_add_overflow"]
+pub fn panic_const_add_overflow() -> ! {
+    panic("attempt to add with overflow");
+}
+
+#[track_caller]
+#[lang = "panic_const_sub_overflow"]
+pub fn panic_const_sub_overflow() -> ! {
+    panic("attempt to subtract with overflow");
+}

/*
 * Code
@@ -120,6 +120,12 @@ impl Add for isize {
    }
}

+#[track_caller]
+#[lang = "panic_const_add_overflow"]
+pub fn panic_const_add_overflow() -> ! {
+    panic("attempt to add with overflow");
+}
+
/*
 * Code
 */
@@ -189,6 +189,12 @@ pub fn panic(_msg: &'static str) -> ! {
    }
}

+#[track_caller]
+#[lang = "panic_const_add_overflow"]
+pub fn panic_const_add_overflow() -> ! {
+    panic("attempt to add with overflow");
+}
+
/*
 * Code
 */
@@ -122,6 +122,12 @@ impl Add for isize {
    }
}

+#[track_caller]
+#[lang = "panic_const_add_overflow"]
+pub fn panic_const_add_overflow() -> ! {
+    panic("attempt to add with overflow");
+}
+
/*
 * Code
 */
@@ -207,6 +207,24 @@ impl Mul for isize {
    }
}

+#[track_caller]
+#[lang = "panic_const_add_overflow"]
+pub fn panic_const_add_overflow() -> ! {
+    panic("attempt to add with overflow");
+}
+
+#[track_caller]
+#[lang = "panic_const_sub_overflow"]
+pub fn panic_const_sub_overflow() -> ! {
+    panic("attempt to subtract with overflow");
+}
+
+#[track_caller]
+#[lang = "panic_const_mul_overflow"]
+pub fn panic_const_mul_overflow() -> ! {
+    panic("attempt to multiply with overflow");
+}
+
/*
 * Code
 */