merge rustc history

commit 9b57a2f55a
4056 changed files with 68452 additions and 45931 deletions

.github/workflows/ci.yml (vendored): 20 changes
@@ -297,7 +297,7 @@ jobs:
 os: ubuntu-20.04-xl
 - name: dist-x86_64-apple
 env:
-SCRIPT: "./x.py dist --host=x86_64-apple-darwin --target=x86_64-apple-darwin"
+SCRIPT: "./x.py dist bootstrap --include-default-paths --host=x86_64-apple-darwin --target=x86_64-apple-darwin"
 RUST_CONFIGURE_ARGS: "--enable-full-tools --enable-sanitizers --enable-profiler --set rust.jemalloc --set llvm.ninja=false"
 RUSTC_RETRY_LINKER_ON_SEGFAULT: 1
 MACOSX_DEPLOYMENT_TARGET: 10.7
@@ -308,7 +308,7 @@ jobs:
 os: macos-latest
 - name: dist-apple-various
 env:
-SCRIPT: "./x.py dist --host='' --target=aarch64-apple-ios,x86_64-apple-ios,aarch64-apple-ios-sim"
+SCRIPT: "./x.py dist bootstrap --include-default-paths --host='' --target=aarch64-apple-ios,x86_64-apple-ios,aarch64-apple-ios-sim"
 RUST_CONFIGURE_ARGS: "--enable-sanitizers --enable-profiler --set rust.jemalloc --set llvm.ninja=false"
 RUSTC_RETRY_LINKER_ON_SEGFAULT: 1
 MACOSX_DEPLOYMENT_TARGET: 10.7
@@ -318,7 +318,7 @@ jobs:
 os: macos-latest
 - name: dist-x86_64-apple-alt
 env:
-SCRIPT: "./x.py dist"
+SCRIPT: "./x.py dist bootstrap --include-default-paths"
 RUST_CONFIGURE_ARGS: "--enable-extended --enable-profiler --set rust.jemalloc --set llvm.ninja=false"
 RUSTC_RETRY_LINKER_ON_SEGFAULT: 1
 MACOSX_DEPLOYMENT_TARGET: 10.7
@@ -350,7 +350,7 @@ jobs:
 os: macos-latest
 - name: dist-aarch64-apple
 env:
-SCRIPT: "./x.py dist --stage 2"
+SCRIPT: "./x.py dist bootstrap --include-default-paths --stage 2"
 RUST_CONFIGURE_ARGS: "--build=x86_64-apple-darwin --host=aarch64-apple-darwin --target=aarch64-apple-darwin --enable-full-tools --enable-sanitizers --enable-profiler --disable-docs --set rust.jemalloc --set llvm.ninja=false"
 RUSTC_RETRY_LINKER_ON_SEGFAULT: 1
 USE_XCODE_CLANG: 1
@@ -424,19 +424,19 @@ jobs:
 - name: dist-x86_64-msvc
 env:
 RUST_CONFIGURE_ARGS: "--build=x86_64-pc-windows-msvc --host=x86_64-pc-windows-msvc --target=x86_64-pc-windows-msvc --enable-full-tools --enable-profiler"
-SCRIPT: PGO_HOST=x86_64-pc-windows-msvc src/ci/pgo.sh python x.py dist
+SCRIPT: PGO_HOST=x86_64-pc-windows-msvc src/ci/pgo.sh python x.py dist bootstrap --include-default-paths
 DIST_REQUIRE_ALL_TOOLS: 1
 os: windows-latest-xl
 - name: dist-i686-msvc
 env:
 RUST_CONFIGURE_ARGS: "--build=i686-pc-windows-msvc --host=i686-pc-windows-msvc --target=i686-pc-windows-msvc,i586-pc-windows-msvc --enable-full-tools --enable-profiler"
-SCRIPT: python x.py dist
+SCRIPT: python x.py dist bootstrap --include-default-paths
 DIST_REQUIRE_ALL_TOOLS: 1
 os: windows-latest-xl
 - name: dist-aarch64-msvc
 env:
 RUST_CONFIGURE_ARGS: "--build=x86_64-pc-windows-msvc --host=aarch64-pc-windows-msvc --enable-full-tools --enable-profiler"
-SCRIPT: python x.py dist
+SCRIPT: python x.py dist bootstrap --include-default-paths
 DIST_REQUIRE_ALL_TOOLS: 1
 WINDOWS_SDK_20348_HACK: 1
 os: windows-latest-xl
@@ -444,13 +444,13 @@ jobs:
 env:
 RUST_CONFIGURE_ARGS: "--build=i686-pc-windows-gnu --enable-full-tools --enable-profiler --set llvm.allow-old-toolchain"
 NO_DOWNLOAD_CI_LLVM: 1
-SCRIPT: python x.py dist
+SCRIPT: python x.py dist bootstrap --include-default-paths
 CUSTOM_MINGW: 1
 DIST_REQUIRE_ALL_TOOLS: 1
 os: windows-latest-xl
 - name: dist-x86_64-mingw
 env:
-SCRIPT: python x.py dist
+SCRIPT: python x.py dist bootstrap --include-default-paths
 RUST_CONFIGURE_ARGS: "--build=x86_64-pc-windows-gnu --enable-full-tools --enable-profiler --set llvm.allow-old-toolchain"
 NO_DOWNLOAD_CI_LLVM: 1
 CUSTOM_MINGW: 1
@@ -459,7 +459,7 @@ jobs:
 - name: dist-x86_64-msvc-alt
 env:
 RUST_CONFIGURE_ARGS: "--build=x86_64-pc-windows-msvc --enable-extended --enable-profiler"
-SCRIPT: python x.py dist
+SCRIPT: python x.py dist bootstrap --include-default-paths
 os: windows-latest-xl
 timeout-minutes: 600
 runs-on: "${{ matrix.os }}"
COPYRIGHT: 396 changes

@@ -23,65 +23,251 @@ The Rust Project includes packages written by third parties.
The following third party packages are included, and carry
|
||||
their own copyright notices and license terms:
|
||||
|
||||
* LLVM. Code for this package is found in src/llvm-project.
|
||||
* LLVM, located in src/llvm-project, is licensed under the following
|
||||
terms.
|
||||
|
||||
Copyright (c) 2003-2013 University of Illinois at
|
||||
Urbana-Champaign. All rights reserved.
|
||||
==============================================================================
|
||||
The LLVM Project is under the Apache License v2.0 with LLVM Exceptions:
|
||||
==============================================================================
|
||||
|
||||
Developed by:
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
LLVM Team
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
University of Illinois at Urbana-Champaign
|
||||
1. Definitions.
|
||||
|
||||
http://llvm.org
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
documentation files (the "Software"), to deal with the
|
||||
Software without restriction, including without
|
||||
limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following
|
||||
conditions:
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
* Redistributions of source code must retain the
|
||||
above copyright notice, this list of conditions
|
||||
and the following disclaimers.
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
* Redistributions in binary form must reproduce the
|
||||
above copyright notice, this list of conditions
|
||||
and the following disclaimers in the documentation
|
||||
and/or other materials provided with the
|
||||
distribution.
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
* Neither the names of the LLVM Team, University of
|
||||
Illinois at Urbana-Champaign, nor the names of its
|
||||
contributors may be used to endorse or promote
|
||||
products derived from this Software without
|
||||
specific prior written permission.
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||
SHALL THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE
|
||||
FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
|
||||
OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
OTHER DEALINGS WITH THE SOFTWARE.
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
* Additional libraries included in LLVM carry separate
|
||||
BSD-compatible licenses. See src/llvm-project/llvm/LICENSE.TXT
|
||||
for details.
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
* compiler-rt, in src/compiler-rt is dual licensed under
|
||||
LLVM's license and MIT:
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
Copyright (c) 2009-2014 by the contributors listed in
|
||||
CREDITS.TXT
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
|
||||
---- LLVM Exceptions to the Apache 2.0 License ----
|
||||
|
||||
As an exception, if, as a result of your compiling your source code, portions
|
||||
of this Software are embedded into an Object form of such source code, you
|
||||
may redistribute such embedded portions in such Object form without complying
|
||||
with the conditions of Sections 4(a), 4(b) and 4(d) of the License.
|
||||
|
||||
In addition, if you combine or link compiled forms of this Software with
|
||||
software that is licensed under the GPLv2 ("Combined Software") and if a
|
||||
court of competent jurisdiction determines that the patent provision (Section
|
||||
3), the indemnity provision (Section 9) or other Section of the License
|
||||
conflicts with the conditions of the GPLv2, you may retroactively and
|
||||
prospectively choose to deem waived or otherwise exclude such Section(s) of
|
||||
the License, but only in their entirety and only with respect to the Combined
|
||||
Software.
|
||||
|
||||
==============================================================================
|
||||
Software from third parties included in the LLVM Project:
|
||||
==============================================================================
|
||||
The LLVM Project contains third party software which is under different license
|
||||
terms. All such code will be identified clearly using at least one of two
|
||||
mechanisms:
|
||||
1) It will be in a separate directory tree with its own `LICENSE.txt` or
|
||||
`LICENSE` file at the top containing the specific license and restrictions
|
||||
which apply to that software, or
|
||||
2) It will contain specific license and restriction terms at the top of every
|
||||
file.
|
||||
|
||||
==============================================================================
|
||||
Legacy LLVM License (https://llvm.org/docs/DeveloperPolicy.html#legacy):
|
||||
==============================================================================
|
||||
University of Illinois/NCSA
|
||||
Open Source License
|
||||
|
||||
Copyright (c) 2003-2019 University of Illinois at Urbana-Champaign.
|
||||
All rights reserved.
|
||||
|
||||
Developed by:
|
||||
|
@@ -92,70 +278,32 @@ their own copyright notices and license terms:
|
|||
|
||||
http://llvm.org
|
||||
|
||||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
documentation files (the "Software"), to deal with the
|
||||
Software without restriction, including without
|
||||
limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following
|
||||
conditions:
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal with
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
of the Software, and to permit persons to whom the Software is furnished to do
|
||||
so, subject to the following conditions:
|
||||
|
||||
* Redistributions of source code must retain the
|
||||
above copyright notice, this list of conditions
|
||||
and the following disclaimers.
|
||||
* Redistributions of source code must retain the above copyright notice,
|
||||
this list of conditions and the following disclaimers.
|
||||
|
||||
* Redistributions in binary form must reproduce the
|
||||
above copyright notice, this list of conditions
|
||||
and the following disclaimers in the documentation
|
||||
and/or other materials provided with the
|
||||
distribution.
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimers in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
* Neither the names of the LLVM Team, University of
|
||||
Illinois at Urbana-Champaign, nor the names of its
|
||||
contributors may be used to endorse or promote
|
||||
products derived from this Software without
|
||||
specific prior written permission.
|
||||
* Neither the names of the LLVM Team, University of Illinois at
|
||||
Urbana-Champaign, nor the names of its contributors may be used to
|
||||
endorse or promote products derived from this Software without specific
|
||||
prior written permission.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||
SHALL THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE
|
||||
FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
|
||||
OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
OTHER DEALINGS WITH THE SOFTWARE.
|
||||
|
||||
========================================================
|
||||
|
||||
Copyright (c) 2009-2014 by the contributors listed in
|
||||
CREDITS.TXT
|
||||
|
||||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
documentation files (the "Software"), to deal in the
|
||||
Software without restriction, including without
|
||||
limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions
|
||||
of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE
|
||||
SOFTWARE.
|
||||
|
||||
* Portions of the FFI code for interacting with the native ABI
|
||||
is derived from the Clay programming language, which carries
|
||||
|
@@ -191,41 +339,3 @@ their own copyright notices and license terms:
|
|||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
|
||||
OF SUCH DAMAGE.
|
||||
|
||||
* libbacktrace, under src/libbacktrace:
|
||||
|
||||
Copyright (C) 2012-2014 Free Software Foundation, Inc.
|
||||
Written by Ian Lance Taylor, Google.
|
||||
|
||||
Redistribution and use in source and binary forms, with
|
||||
or without modification, are permitted provided that the
|
||||
following conditions are met:
|
||||
|
||||
(1) Redistributions of source code must retain the
|
||||
above copyright notice, this list of conditions and
|
||||
the following disclaimer.
|
||||
|
||||
(2) Redistributions in binary form must reproduce
|
||||
the above copyright notice, this list of conditions
|
||||
and the following disclaimer in the documentation
|
||||
and/or other materials provided with the
|
||||
distribution.
|
||||
|
||||
(3) The name of the author may not be used to
|
||||
endorse or promote products derived from this
|
||||
software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
|
||||
AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
|
||||
NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
|
||||
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
|
||||
USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
|
||||
OF SUCH DAMAGE. */
|
Cargo.lock: 182 changes

@@ -288,7 +288,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "cargo"
|
||||
version = "0.66.0"
|
||||
version = "0.67.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"atty",
|
||||
|
@ -297,7 +297,7 @@ dependencies = [
|
|||
"cargo-test-macro",
|
||||
"cargo-test-support",
|
||||
"cargo-util",
|
||||
"clap",
|
||||
"clap 4.0.15",
|
||||
"crates-io",
|
||||
"curl",
|
||||
"curl-sys",
|
||||
|
@ -330,8 +330,10 @@ dependencies = [
|
|||
"pretty_env_logger",
|
||||
"rustc-workspace-hack",
|
||||
"rustfix",
|
||||
"same-file",
|
||||
"semver",
|
||||
"serde",
|
||||
"serde-value",
|
||||
"serde_ignored",
|
||||
"serde_json",
|
||||
"shell-escape",
|
||||
|
@ -385,7 +387,7 @@ dependencies = [
|
|||
"directories",
|
||||
"rustc-build-sysroot",
|
||||
"rustc-workspace-hack",
|
||||
"rustc_tools_util 0.2.1",
|
||||
"rustc_tools_util",
|
||||
"rustc_version",
|
||||
"serde",
|
||||
"serde_json",
|
||||
|
@ -418,6 +420,7 @@ dependencies = [
|
|||
"anyhow",
|
||||
"cargo-test-macro",
|
||||
"cargo-util",
|
||||
"crates-io",
|
||||
"filetime",
|
||||
"flate2",
|
||||
"git2",
|
||||
|
@ -436,7 +439,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "cargo-util"
|
||||
version = "0.2.1"
|
||||
version = "0.2.2"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"core-foundation",
|
||||
|
@ -589,7 +592,7 @@ dependencies = [
|
|||
"atty",
|
||||
"bitflags",
|
||||
"clap_derive",
|
||||
"clap_lex",
|
||||
"clap_lex 0.2.2",
|
||||
"indexmap",
|
||||
"once_cell",
|
||||
"strsim",
|
||||
|
@ -597,13 +600,26 @@ dependencies = [
|
|||
"textwrap",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.0.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6bf8832993da70a4c6d13c581f4463c2bdda27b9bf1c5498dc4365543abe6d6f"
|
||||
dependencies = [
|
||||
"atty",
|
||||
"bitflags",
|
||||
"clap_lex 0.3.0",
|
||||
"strsim",
|
||||
"termcolor",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap_complete"
|
||||
version = "3.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "df6f3613c0a3cddfd78b41b10203eb322cb29b600cbdf808a7d3db95691b8e25"
|
||||
dependencies = [
|
||||
"clap",
|
||||
"clap 3.2.20",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -628,9 +644,18 @@ dependencies = [
|
|||
"os_str_bytes",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap_lex"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0d4198f73e42b4936b35b5bb248d81d2b595ecb170da0bac7655c54eedfa8da8"
|
||||
dependencies = [
|
||||
"os_str_bytes",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clippy"
|
||||
version = "0.1.65"
|
||||
version = "0.1.66"
|
||||
dependencies = [
|
||||
"clippy_lints",
|
||||
"clippy_utils",
|
||||
|
@ -645,7 +670,7 @@ dependencies = [
|
|||
"regex",
|
||||
"rustc-semver",
|
||||
"rustc-workspace-hack",
|
||||
"rustc_tools_util 0.2.0",
|
||||
"rustc_tools_util",
|
||||
"semver",
|
||||
"serde",
|
||||
"syn",
|
||||
|
@ -662,7 +687,7 @@ name = "clippy_dev"
|
|||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"clap",
|
||||
"clap 3.2.20",
|
||||
"indoc",
|
||||
"itertools",
|
||||
"opener",
|
||||
|
@ -673,7 +698,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "clippy_lints"
|
||||
version = "0.1.65"
|
||||
version = "0.1.66"
|
||||
dependencies = [
|
||||
"cargo_metadata 0.14.0",
|
||||
"clippy_utils",
|
||||
|
@ -695,7 +720,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "clippy_utils"
|
||||
version = "0.1.65"
|
||||
version = "0.1.66"
|
||||
dependencies = [
|
||||
"arrayvec",
|
||||
"if_chain",
|
||||
|
@ -771,9 +796,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "compiler_builtins"
|
||||
version = "0.1.79"
|
||||
version = "0.1.82"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4f873ce2bd3550b0b565f878b3d04ea8253f4259dc3d20223af2e1ba86f5ecca"
|
||||
checksum = "18cd7635fea7bb481ea543b392789844c1ad581299da70184c7175ce3af76603"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"rustc-std-workspace-core",
|
||||
|
@ -804,9 +829,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "compiletest_rs"
|
||||
version = "0.8.0"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "262134ef87408da1ddfe45e33daa0ca43b75286d6b1076446e602d264cf9847e"
|
||||
checksum = "70489bbb718aea4f92e5f48f2e3b5be670c2051de30e57cb6e5377b4aa08b372"
|
||||
dependencies = [
|
||||
"diff",
|
||||
"filetime",
|
||||
|
@ -1775,7 +1800,7 @@ name = "installer"
|
|||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
"clap 3.2.20",
|
||||
"flate2",
|
||||
"lazy_static",
|
||||
"num_cpus",
|
||||
|
@ -2118,7 +2143,7 @@ dependencies = [
|
|||
"ammonia",
|
||||
"anyhow",
|
||||
"chrono",
|
||||
"clap",
|
||||
"clap 3.2.20",
|
||||
"clap_complete",
|
||||
"elasticlunr-rs",
|
||||
"env_logger 0.9.0",
|
||||
|
@ -2387,6 +2412,15 @@ dependencies = [
|
|||
"vcpkg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ordered-float"
|
||||
version = "2.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7940cf2ca942593318d07fcf2596cdca60a85c9e7fab408a5e21a4f9dcd40d87"
|
||||
dependencies = [
|
||||
"num-traits",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "os_info"
|
||||
version = "3.5.0"
|
||||
|
@ -2618,9 +2652,9 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
|
|||
|
||||
[[package]]
|
||||
name = "pkg-config"
|
||||
version = "0.3.18"
|
||||
version = "0.3.25"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d36492546b6af1463394d46f0c834346f31548646f6ba10849802c9c9a27ac33"
|
||||
checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae"
|
||||
|
||||
[[package]]
|
||||
name = "polonius-engine"
|
||||
|
@ -2687,11 +2721,11 @@ checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5"
|
|||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.37"
|
||||
version = "1.0.46"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ec757218438d5fda206afc041538b2f6d889286160d649a86a24d37e1235afd1"
|
||||
checksum = "94e2ef8dbfc347b10c094890f778ee2e36ca9bb4262e86dc99cd217e35f3470b"
|
||||
dependencies = [
|
||||
"unicode-xid",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -2981,7 +3015,7 @@ dependencies = [
|
|||
name = "rustbook"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"clap",
|
||||
"clap 3.2.20",
|
||||
"env_logger 0.7.1",
|
||||
"mdbook",
|
||||
]
|
||||
|
@ -3079,7 +3113,7 @@ name = "rustc-workspace-hack"
|
|||
version = "1.0.0"
|
||||
dependencies = [
|
||||
"bstr",
|
||||
"clap",
|
||||
"clap 3.2.20",
|
||||
"libz-sys",
|
||||
"regex",
|
||||
"serde_json",
|
||||
|
@ -3243,7 +3277,6 @@ dependencies = [
|
|||
"bitflags",
|
||||
"cstr",
|
||||
"libc",
|
||||
"libloading",
|
||||
"measureme",
|
||||
"object 0.29.0",
|
||||
"rustc-demangle",
|
||||
|
@ -3266,6 +3299,7 @@ dependencies = [
|
|||
"rustc_symbol_mangling",
|
||||
"rustc_target",
|
||||
"smallvec",
|
||||
"tempfile",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
|
@ -3375,6 +3409,7 @@ dependencies = [
|
|||
"rustc_errors",
|
||||
"rustc_feature",
|
||||
"rustc_hir",
|
||||
"rustc_hir_analysis",
|
||||
"rustc_hir_pretty",
|
||||
"rustc_interface",
|
||||
"rustc_lint",
|
||||
|
@ -3388,7 +3423,6 @@ dependencies = [
|
|||
"rustc_session",
|
||||
"rustc_span",
|
||||
"rustc_target",
|
||||
"rustc_typeck",
|
||||
"serde_json",
|
||||
"tracing",
|
||||
"winapi",
|
||||
|
@ -3419,6 +3453,8 @@ version = "0.0.0"
|
|||
dependencies = [
|
||||
"annotate-snippets",
|
||||
"atty",
|
||||
"rustc_ast",
|
||||
"rustc_ast_pretty",
|
||||
"rustc_data_structures",
|
||||
"rustc_error_messages",
|
||||
"rustc_hir",
|
||||
|
@ -3493,6 +3529,34 @@ dependencies = [
|
|||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustc_hir_analysis"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"rustc_arena",
|
||||
"rustc_ast",
|
||||
"rustc_attr",
|
||||
"rustc_data_structures",
|
||||
"rustc_errors",
|
||||
"rustc_feature",
|
||||
"rustc_graphviz",
|
||||
"rustc_hir",
|
||||
"rustc_hir_pretty",
|
||||
"rustc_index",
|
||||
"rustc_infer",
|
||||
"rustc_lint",
|
||||
"rustc_macros",
|
||||
"rustc_middle",
|
||||
"rustc_serialize",
|
||||
"rustc_session",
|
||||
"rustc_span",
|
||||
"rustc_target",
|
||||
"rustc_trait_selection",
|
||||
"rustc_type_ir",
|
||||
"smallvec",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustc_hir_pretty"
|
||||
version = "0.0.0"
|
||||
|
@ -3572,6 +3636,7 @@ dependencies = [
|
|||
"rustc_errors",
|
||||
"rustc_expand",
|
||||
"rustc_hir",
|
||||
"rustc_hir_analysis",
|
||||
"rustc_incremental",
|
||||
"rustc_lint",
|
||||
"rustc_macros",
|
||||
|
@ -3594,7 +3659,6 @@ dependencies = [
|
|||
"rustc_trait_selection",
|
||||
"rustc_traits",
|
||||
"rustc_ty_utils",
|
||||
"rustc_typeck",
|
||||
"smallvec",
|
||||
"tracing",
|
||||
"winapi",
|
||||
|
@ -3719,8 +3783,6 @@ dependencies = [
|
|||
"either",
|
||||
"gsgdt",
|
||||
"polonius-engine",
|
||||
"rand 0.8.5",
|
||||
"rand_xoshiro",
|
||||
"rustc-rayon",
|
||||
"rustc-rayon-core",
|
||||
"rustc_apfloat",
|
||||
|
@ -3904,12 +3966,12 @@ dependencies = [
|
|||
"rustc_data_structures",
|
||||
"rustc_errors",
|
||||
"rustc_hir",
|
||||
"rustc_hir_analysis",
|
||||
"rustc_macros",
|
||||
"rustc_middle",
|
||||
"rustc_session",
|
||||
"rustc_span",
|
||||
"rustc_trait_selection",
|
||||
"rustc_typeck",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
|
@ -4090,6 +4152,7 @@ version = "0.0.0"
|
|||
dependencies = [
|
||||
"bitflags",
|
||||
"rustc_data_structures",
|
||||
"rustc_feature",
|
||||
"rustc_index",
|
||||
"rustc_macros",
|
||||
"rustc_serialize",
|
||||
|
@ -4098,10 +4161,6 @@ dependencies = [
|
|||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustc_tools_util"
|
||||
version = "0.2.0"
|
||||
|
||||
[[package]]
|
||||
name = "rustc_tools_util"
|
||||
version = "0.2.1"
|
||||
|
@ -4171,6 +4230,8 @@ dependencies = [
|
|||
name = "rustc_ty_utils"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"rand 0.8.5",
|
||||
"rand_xoshiro",
|
||||
"rustc_data_structures",
|
||||
"rustc_errors",
|
||||
"rustc_hir",
|
||||
|
@ -4198,35 +4259,6 @@ dependencies = [
|
|||
"smallvec",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustc_typeck"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"rustc_arena",
|
||||
"rustc_ast",
|
||||
"rustc_attr",
|
||||
"rustc_data_structures",
|
||||
"rustc_errors",
|
||||
"rustc_feature",
|
||||
"rustc_graphviz",
|
||||
"rustc_hir",
|
||||
"rustc_hir_pretty",
|
||||
"rustc_index",
|
||||
"rustc_infer",
|
||||
"rustc_lint",
|
||||
"rustc_macros",
|
||||
"rustc_middle",
|
||||
"rustc_serialize",
|
||||
"rustc_session",
|
||||
"rustc_span",
|
||||
"rustc_target",
|
||||
"rustc_trait_selection",
|
||||
"rustc_ty_utils",
|
||||
"rustc_type_ir",
|
||||
"smallvec",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustc_version"
|
||||
version = "0.4.0"
|
||||
|
@ -4310,7 +4342,7 @@ dependencies = [
|
|||
"anyhow",
|
||||
"bytecount",
|
||||
"cargo_metadata 0.14.0",
|
||||
"clap",
|
||||
"clap 3.2.20",
|
||||
"derive-new",
|
||||
"diff",
|
||||
"dirs",
|
||||
|
@ -4423,6 +4455,16 @@ dependencies = [
|
|||
"serde_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde-value"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c"
|
||||
dependencies = [
|
||||
"ordered-float",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.143"
|
||||
|
@ -4677,13 +4719,13 @@ checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
|
|||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "1.0.91"
|
||||
version = "1.0.102"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b683b2b825c8eef438b77c36a06dc262294da3d5a5813fac20da149241dcd44d"
|
||||
checksum = "3fcd952facd492f9be3ef0d0b7032a6e442ee9b361d4acc2b1d0c4aaa5f613a1"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"unicode-xid",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -5173,6 +5215,12 @@ dependencies = [
|
|||
"matches",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unicode-ident"
|
||||
version = "1.0.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-normalization"
|
||||
version = "0.1.22"
|
||||
|
|
|
@@ -4,7 +4,6 @@ version = "0.0.0"
 edition = "2021"

 [lib]
 doctest = false

 [dependencies]
 bitflags = "1.2.1"
@@ -2060,8 +2060,11 @@ impl TyKind {
     }

     pub fn is_simple_path(&self) -> Option<Symbol> {
-        if let TyKind::Path(None, Path { segments, .. }) = &self && segments.len() == 1 {
-            Some(segments[0].ident.name)
+        if let TyKind::Path(None, Path { segments, .. }) = &self
+            && let [segment] = &segments[..]
+            && segment.args.is_none()
+        {
+            Some(segment.ident.name)
         } else {
             None
         }
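The rewritten helper now accepts only a path of exactly one segment and additionally rejects a segment that carries generic arguments. A minimal, self-contained sketch of that check using simplified stand-in types (not the real rustc AST):

// Sketch of the tightened "simple path" check with made-up types.
struct Segment {
    name: String,
    args: Option<String>, // stand-in for generic arguments like `<T>`
}

fn simple_path_name(segments: &[Segment]) -> Option<&str> {
    match segments {
        // Exactly one segment, and that segment has no generic arguments.
        [segment] if segment.args.is_none() => Some(segment.name.as_str()),
        _ => None,
    }
}

fn main() {
    let simple = [Segment { name: "u8".into(), args: None }];
    let generic = [Segment { name: "Vec".into(), args: Some("<u8>".into()) }];

    assert_eq!(simple_path_name(&simple), Some("u8"));
    assert_eq!(simple_path_name(&generic), None); // rejected: carries generic args
    println!("ok");
}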
@@ -2953,7 +2956,7 @@ pub enum AssocItemKind {
     /// An associated function.
     Fn(Box<Fn>),
     /// An associated type.
-    TyAlias(Box<TyAlias>),
+    Type(Box<TyAlias>),
     /// A macro expanding to associated items.
     MacCall(P<MacCall>),
 }
@@ -2963,7 +2966,7 @@ impl AssocItemKind {
         match *self {
             Self::Const(defaultness, ..)
             | Self::Fn(box Fn { defaultness, .. })
-            | Self::TyAlias(box TyAlias { defaultness, .. }) => defaultness,
+            | Self::Type(box TyAlias { defaultness, .. }) => defaultness,
             Self::MacCall(..) => Defaultness::Final,
         }
     }
@@ -2974,7 +2977,7 @@ impl From<AssocItemKind> for ItemKind {
         match assoc_item_kind {
             AssocItemKind::Const(a, b, c) => ItemKind::Const(a, b, c),
             AssocItemKind::Fn(fn_kind) => ItemKind::Fn(fn_kind),
-            AssocItemKind::TyAlias(ty_alias_kind) => ItemKind::TyAlias(ty_alias_kind),
+            AssocItemKind::Type(ty_alias_kind) => ItemKind::TyAlias(ty_alias_kind),
             AssocItemKind::MacCall(a) => ItemKind::MacCall(a),
         }
     }
@@ -2987,7 +2990,7 @@ impl TryFrom<ItemKind> for AssocItemKind {
         Ok(match item_kind {
             ItemKind::Const(a, b, c) => AssocItemKind::Const(a, b, c),
             ItemKind::Fn(fn_kind) => AssocItemKind::Fn(fn_kind),
-            ItemKind::TyAlias(ty_alias_kind) => AssocItemKind::TyAlias(ty_alias_kind),
+            ItemKind::TyAlias(ty_kind) => AssocItemKind::Type(ty_kind),
             ItemKind::MacCall(a) => AssocItemKind::MacCall(a),
             _ => return Err(item_kind),
         })
@@ -3039,14 +3042,13 @@ pub type ForeignItem = Item<ForeignItemKind>;
 mod size_asserts {
     use super::*;
     use rustc_data_structures::static_assert_size;
-    // These are in alphabetical order, which is easy to maintain.
+    // tidy-alphabetical-start
     static_assert_size!(AssocItem, 104);
     static_assert_size!(AssocItemKind, 32);
     static_assert_size!(Attribute, 32);
     static_assert_size!(Block, 48);
     static_assert_size!(Expr, 104);
     static_assert_size!(ExprKind, 72);
-    #[cfg(not(bootstrap))]
     static_assert_size!(Fn, 184);
     static_assert_size!(ForeignItem, 96);
     static_assert_size!(ForeignItemKind, 24);
@@ -3061,11 +3063,12 @@ mod size_asserts {
     static_assert_size!(Local, 72);
     static_assert_size!(Param, 40);
     static_assert_size!(Pat, 120);
-    static_assert_size!(PatKind, 96);
     static_assert_size!(Path, 40);
     static_assert_size!(PathSegment, 24);
+    static_assert_size!(PatKind, 96);
     static_assert_size!(Stmt, 32);
     static_assert_size!(StmtKind, 16);
     static_assert_size!(Ty, 96);
     static_assert_size!(TyKind, 72);
+    // tidy-alphabetical-end
 }
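The `static_assert_size!` calls above pin the sizes of frequently used AST types so that accidental growth fails the build instead of silently regressing memory use. A rough sketch of how such a compile-time size assertion can be written in plain Rust (the real macro lives in rustc_data_structures; the `Token` struct below is made up for illustration):

// A minimal stand-in for a `static_assert_size!`-style check: the program
// fails to compile if the type's size differs from the expected value.
macro_rules! static_assert_size {
    ($ty:ty, $size:expr) => {
        const _: [(); $size] = [(); ::std::mem::size_of::<$ty>()];
    };
}

// Hypothetical type used only to demonstrate the assertion.
struct Token {
    kind: u64,
    span: (u32, u32),
}

// 8 bytes for `kind` plus 8 bytes for `span` gives 16 in total.
static_assert_size!(Token, 16);

fn main() {
    println!("size check passed at compile time");
}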
@@ -13,9 +13,7 @@
 #![feature(const_default_impls)]
 #![feature(const_trait_impl)]
 #![feature(if_let_guard)]
-#![cfg_attr(bootstrap, feature(label_break_value))]
 #![feature(let_chains)]
-#![cfg_attr(bootstrap, feature(let_else))]
 #![feature(min_specialization)]
 #![feature(negative_impls)]
 #![feature(slice_internals)]

@@ -1106,7 +1106,7 @@ pub fn noop_flat_map_assoc_item<T: MutVisitor>(
             visit_fn_sig(sig, visitor);
             visit_opt(body, |body| visitor.visit_block(body));
         }
-        AssocItemKind::TyAlias(box TyAlias {
+        AssocItemKind::Type(box TyAlias {
             defaultness,
             generics,
             where_clauses,
@ -13,7 +13,7 @@ use rustc_span::symbol::{kw, sym};
|
|||
use rustc_span::symbol::{Ident, Symbol};
|
||||
use rustc_span::{self, edition::Edition, Span, DUMMY_SP};
|
||||
use std::borrow::Cow;
|
||||
use std::{fmt, mem};
|
||||
use std::fmt;
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
|
||||
pub enum CommentKind {
|
||||
|
@ -256,10 +256,6 @@ pub enum TokenKind {
|
|||
Eof,
|
||||
}
|
||||
|
||||
// `TokenKind` is used a lot. Make sure it doesn't unintentionally get bigger.
|
||||
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
|
||||
rustc_data_structures::static_assert_size!(TokenKind, 16);
|
||||
|
||||
#[derive(Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
|
||||
pub struct Token {
|
||||
pub kind: TokenKind,
|
||||
|
@ -335,11 +331,6 @@ impl Token {
|
|||
Token::new(Ident(ident.name, ident.is_raw_guess()), ident.span)
|
||||
}
|
||||
|
||||
/// Return this token by value and leave a dummy token in its place.
|
||||
pub fn take(&mut self) -> Self {
|
||||
mem::replace(self, Token::dummy())
|
||||
}
|
||||
|
||||
/// For interpolated tokens, returns a span of the fragment to which the interpolated
|
||||
/// token refers. For all other tokens this is just a regular span.
|
||||
/// It is particularly important to use this for identifiers and lifetimes
|
||||
|
@ -354,17 +345,14 @@ impl Token {
|
|||
}
|
||||
|
||||
pub fn is_op(&self) -> bool {
|
||||
!matches!(
|
||||
self.kind,
|
||||
OpenDelim(..)
|
||||
| CloseDelim(..)
|
||||
| Literal(..)
|
||||
| DocComment(..)
|
||||
| Ident(..)
|
||||
| Lifetime(..)
|
||||
| Interpolated(..)
|
||||
| Eof
|
||||
)
|
||||
match self.kind {
|
||||
Eq | Lt | Le | EqEq | Ne | Ge | Gt | AndAnd | OrOr | Not | Tilde | BinOp(_)
|
||||
| BinOpEq(_) | At | Dot | DotDot | DotDotDot | DotDotEq | Comma | Semi | Colon
|
||||
| ModSep | RArrow | LArrow | FatArrow | Pound | Dollar | Question | SingleQuote => true,
|
||||
|
||||
OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) | Ident(..)
|
||||
| Lifetime(..) | Interpolated(..) | Eof => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_like_plus(&self) -> bool {
|
||||
|
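The `is_op` rewrite above trades a negated `matches!` for an exhaustive match, so that adding a new token kind forces this site to be revisited by the compiler. A tiny sketch of the same idiom with a made-up enum:

// Sketch: exhaustive match instead of a negated `matches!`. Adding a variant
// to `Kind` turns this function into a compile error until it is handled.
enum Kind {
    Plus,
    Ident,
    Eof,
}

fn is_op(kind: &Kind) -> bool {
    match kind {
        Kind::Plus => true,
        Kind::Ident | Kind::Eof => false,
    }
}

fn main() {
    assert!(is_op(&Kind::Plus));
    assert!(!is_op(&Kind::Eof));
}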
@ -733,6 +721,7 @@ impl Token {
|
|||
}
|
||||
|
||||
impl PartialEq<TokenKind> for Token {
|
||||
#[inline]
|
||||
fn eq(&self, rhs: &TokenKind) -> bool {
|
||||
self.kind == *rhs
|
||||
}
|
||||
|
@ -756,10 +745,6 @@ pub enum Nonterminal {
|
|||
NtVis(P<ast::Visibility>),
|
||||
}
|
||||
|
||||
// `Nonterminal` is used a lot. Make sure it doesn't unintentionally get bigger.
|
||||
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
|
||||
rustc_data_structures::static_assert_size!(Nonterminal, 16);
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Encodable, Decodable)]
|
||||
pub enum NonterminalKind {
|
||||
Item,
|
||||
|
@ -898,3 +883,17 @@ where
|
|||
panic!("interpolated tokens should not be present in the HIR")
|
||||
}
|
||||
}
|
||||
|
||||
// Some types are used a lot. Make sure they don't unintentionally get bigger.
|
||||
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
|
||||
mod size_asserts {
|
||||
use super::*;
|
||||
use rustc_data_structures::static_assert_size;
|
||||
// tidy-alphabetical-start
|
||||
static_assert_size!(Lit, 12);
|
||||
static_assert_size!(LitKind, 2);
|
||||
static_assert_size!(Nonterminal, 16);
|
||||
static_assert_size!(Token, 24);
|
||||
static_assert_size!(TokenKind, 16);
|
||||
// tidy-alphabetical-end
|
||||
}
|
||||
|
|
|
@ -47,10 +47,6 @@ pub enum TokenTree {
|
|||
Delimited(DelimSpan, Delimiter, TokenStream),
|
||||
}
|
||||
|
||||
// This type is used a lot. Make sure it doesn't unintentionally get bigger.
|
||||
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
|
||||
rustc_data_structures::static_assert_size!(TokenTree, 32);
|
||||
|
||||
// Ensure all fields of `TokenTree` is `Send` and `Sync`.
|
||||
#[cfg(parallel_compiler)]
|
||||
fn _dummy()
|
||||
|
@ -249,12 +245,12 @@ impl AttrTokenStream {
|
|||
// properly implemented - we always synthesize fake tokens,
|
||||
// so we never reach this code.
|
||||
|
||||
let mut builder = TokenStreamBuilder::new();
|
||||
let mut stream = TokenStream::default();
|
||||
for inner_attr in inner_attrs {
|
||||
builder.push(inner_attr.tokens());
|
||||
stream.push_stream(inner_attr.tokens());
|
||||
}
|
||||
builder.push(delim_tokens.clone());
|
||||
*tree = TokenTree::Delimited(*span, *delim, builder.build());
|
||||
stream.push_stream(delim_tokens.clone());
|
||||
*tree = TokenTree::Delimited(*span, *delim, stream);
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
|
@ -308,13 +304,20 @@ pub struct AttributesData {
|
|||
#[derive(Clone, Debug, Default, Encodable, Decodable)]
|
||||
pub struct TokenStream(pub(crate) Lrc<Vec<TokenTree>>);
|
||||
|
||||
// `TokenStream` is used a lot. Make sure it doesn't unintentionally get bigger.
|
||||
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
|
||||
rustc_data_structures::static_assert_size!(TokenStream, 8);
|
||||
|
||||
/// Similar to `proc_macro::Spacing`, but for tokens.
|
||||
///
|
||||
/// Note that all `ast::TokenTree::Token` instances have a `Spacing`, but when
|
||||
/// we convert to `proc_macro::TokenTree` for proc macros only `Punct`
|
||||
/// `TokenTree`s have a `proc_macro::Spacing`.
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)]
|
||||
pub enum Spacing {
|
||||
/// The token is not immediately followed by an operator token (as
|
||||
/// determined by `Token::is_op`). E.g. a `+` token is `Alone` in `+ =`,
|
||||
/// `+/*foo*/=`, `+ident`, and `+()`.
|
||||
Alone,
|
||||
|
||||
/// The token is immediately followed by an operator token. E.g. a `+`
|
||||
/// token is `Joint` in `+=` and `++`.
|
||||
Joint,
|
||||
}
|
||||
|
||||
|
@ -502,76 +505,49 @@ impl TokenStream {
|
|||
|
||||
self.trees().map(|tree| TokenStream::flatten_token_tree(tree)).collect()
|
||||
}
|
||||
}
|
||||
|
||||
// 99.5%+ of the time we have 1 or 2 elements in this vector.
|
||||
#[derive(Clone)]
|
||||
pub struct TokenStreamBuilder(SmallVec<[TokenStream; 2]>);
|
||||
|
||||
impl TokenStreamBuilder {
|
||||
pub fn new() -> TokenStreamBuilder {
|
||||
TokenStreamBuilder(SmallVec::new())
|
||||
// If `vec` is not empty, try to glue `tt` onto its last token. The return
|
||||
// value indicates if gluing took place.
|
||||
fn try_glue_to_last(vec: &mut Vec<TokenTree>, tt: &TokenTree) -> bool {
|
||||
if let Some(TokenTree::Token(last_tok, Spacing::Joint)) = vec.last()
|
||||
&& let TokenTree::Token(tok, spacing) = tt
|
||||
&& let Some(glued_tok) = last_tok.glue(&tok)
|
||||
{
|
||||
// ...then overwrite the last token tree in `vec` with the
|
||||
// glued token, and skip the first token tree from `stream`.
|
||||
*vec.last_mut().unwrap() = TokenTree::Token(glued_tok, *spacing);
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn push(&mut self, stream: TokenStream) {
|
||||
self.0.push(stream);
|
||||
// Push `tt` onto the end of the stream, possibly gluing it to the last
|
||||
// token. Uses `make_mut` to maximize efficiency.
|
||||
pub fn push_tree(&mut self, tt: TokenTree) {
|
||||
let vec_mut = Lrc::make_mut(&mut self.0);
|
||||
|
||||
if Self::try_glue_to_last(vec_mut, &tt) {
|
||||
// nothing else to do
|
||||
} else {
|
||||
vec_mut.push(tt);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn build(self) -> TokenStream {
|
||||
let mut streams = self.0;
|
||||
match streams.len() {
|
||||
0 => TokenStream::default(),
|
||||
1 => streams.pop().unwrap(),
|
||||
_ => {
|
||||
// We will extend the first stream in `streams` with the
|
||||
// elements from the subsequent streams. This requires using
|
||||
// `make_mut()` on the first stream, and in practice this
|
||||
// doesn't cause cloning 99.9% of the time.
|
||||
//
|
||||
// One very common use case is when `streams` has two elements,
|
||||
// where the first stream has any number of elements within
|
||||
// (often 1, but sometimes many more) and the second stream has
|
||||
// a single element within.
|
||||
// Push `stream` onto the end of the stream, possibly gluing the first
|
||||
// token tree to the last token. (No other token trees will be glued.)
|
||||
// Uses `make_mut` to maximize efficiency.
|
||||
pub fn push_stream(&mut self, stream: TokenStream) {
|
||||
let vec_mut = Lrc::make_mut(&mut self.0);
|
||||
|
||||
// Determine how much the first stream will be extended.
|
||||
// Needed to avoid quadratic blow up from on-the-fly
|
||||
// reallocations (#57735).
|
||||
let num_appends = streams.iter().skip(1).map(|ts| ts.len()).sum();
|
||||
let stream_iter = stream.0.iter().cloned();
|
||||
|
||||
// Get the first stream, which will become the result stream.
|
||||
// If it's `None`, create an empty stream.
|
||||
let mut iter = streams.into_iter();
|
||||
let mut res_stream_lrc = iter.next().unwrap().0;
|
||||
|
||||
// Append the subsequent elements to the result stream, after
|
||||
// reserving space for them.
|
||||
let res_vec_mut = Lrc::make_mut(&mut res_stream_lrc);
|
||||
res_vec_mut.reserve(num_appends);
|
||||
for stream in iter {
|
||||
let stream_iter = stream.0.iter().cloned();
|
||||
|
||||
// If (a) `res_mut_vec` is not empty and the last tree
|
||||
// within it is a token tree marked with `Joint`, and (b)
|
||||
// `stream` is not empty and the first tree within it is a
|
||||
// token tree, and (c) the two tokens can be glued
|
||||
// together...
|
||||
if let Some(TokenTree::Token(last_tok, Spacing::Joint)) = res_vec_mut.last()
|
||||
&& let Some(TokenTree::Token(tok, spacing)) = stream.0.first()
|
||||
&& let Some(glued_tok) = last_tok.glue(&tok)
|
||||
{
|
||||
// ...then overwrite the last token tree in
|
||||
// `res_vec_mut` with the glued token, and skip the
|
||||
// first token tree from `stream`.
|
||||
*res_vec_mut.last_mut().unwrap() = TokenTree::Token(glued_tok, *spacing);
|
||||
res_vec_mut.extend(stream_iter.skip(1));
|
||||
} else {
|
||||
// Append all of `stream`.
|
||||
res_vec_mut.extend(stream_iter);
|
||||
}
|
||||
}
|
||||
|
||||
TokenStream(res_stream_lrc)
|
||||
}
|
||||
if let Some(first) = stream.0.first() && Self::try_glue_to_last(vec_mut, first) {
|
||||
// Now skip the first token tree from `stream`.
|
||||
vec_mut.extend(stream_iter.skip(1));
|
||||
} else {
|
||||
// Append all of `stream`.
|
||||
vec_mut.extend(stream_iter);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
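The replacement for `TokenStreamBuilder` appends token trees directly onto the stream and tries to glue the incoming token onto the stream's trailing token (for example `+` followed by `=` becomes `+=`). A simplified sketch of that gluing idea over plain strings, with the `Joint`-spacing check omitted; the names and the glue table here are illustrative, not rustc's own:

// Sketch of "glue onto the last token" from `push_tree` / `push_stream`.
fn try_glue(last: &str, next: &str) -> Option<String> {
    match (last, next) {
        ("+", "=") => Some("+=".to_string()),
        ("&", "&") => Some("&&".to_string()),
        _ => None,
    }
}

fn push_token(stream: &mut Vec<String>, tok: &str) {
    if let Some(last) = stream.last_mut() {
        if let Some(glued) = try_glue(last, tok) {
            // Overwrite the trailing token with the glued one instead of pushing.
            *last = glued;
            return;
        }
    }
    stream.push(tok.to_string());
}

fn main() {
    let mut stream = Vec::new();
    for tok in ["a", "+", "=", "b", "&", "&", "c"] {
        push_token(&mut stream, tok);
    }
    assert_eq!(stream, vec!["a", "+=", "b", "&&", "c"]);
    println!("{stream:?}");
}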
@ -664,3 +640,17 @@ impl DelimSpan {
|
|||
self.open.with_hi(self.close.hi())
|
||||
}
|
||||
}
|
||||
|
||||
// Some types are used a lot. Make sure they don't unintentionally get bigger.
|
||||
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
|
||||
mod size_asserts {
|
||||
use super::*;
|
||||
use rustc_data_structures::static_assert_size;
|
||||
// tidy-alphabetical-start
|
||||
static_assert_size!(AttrTokenStream, 8);
|
||||
static_assert_size!(AttrTokenTree, 32);
|
||||
static_assert_size!(LazyAttrTokenStream, 8);
|
||||
static_assert_size!(TokenStream, 8);
|
||||
static_assert_size!(TokenTree, 32);
|
||||
// tidy-alphabetical-end
|
||||
}
|
||||
|
@@ -244,14 +244,12 @@ pub trait Visitor<'ast>: Sized {
 
 #[macro_export]
 macro_rules! walk_list {
-    ($visitor: expr, $method: ident, $list: expr) => {
-        for elem in $list {
-            $visitor.$method(elem)
-        }
-    };
-    ($visitor: expr, $method: ident, $list: expr, $($extra_args: expr),*) => {
-        for elem in $list {
-            $visitor.$method(elem, $($extra_args,)*)
-        }
-    }
+    ($visitor: expr, $method: ident, $list: expr $(, $($extra_args: expr),* )?) => {
+        {
+            #[cfg_attr(not(bootstrap), allow(for_loops_over_fallibles))]
+            for elem in $list {
+                $visitor.$method(elem $(, $($extra_args,)* )?)
+            }
+        }
+    }
 }
@ -685,7 +683,7 @@ pub fn walk_assoc_item<'a, V: Visitor<'a>>(visitor: &mut V, item: &'a AssocItem,
|
|||
let kind = FnKind::Fn(FnCtxt::Assoc(ctxt), ident, sig, vis, generics, body.as_deref());
|
||||
visitor.visit_fn(kind, span, id);
|
||||
}
|
||||
AssocItemKind::TyAlias(box TyAlias { generics, bounds, ty, .. }) => {
|
||||
AssocItemKind::Type(box TyAlias { generics, bounds, ty, .. }) => {
|
||||
visitor.visit_generics(generics);
|
||||
walk_list!(visitor, visit_param_bound, bounds, BoundKind::Bound);
|
||||
walk_list!(visitor, visit_ty, ty);
|
||||
|
|
|
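For illustration only (not from the patch): the same single-arm shape, with the optional `$(, ...)?` repetition covering both call forms, works on any toy visitor. The `cfg_attr` wrapper above is omitted here, and `visit_all`/`Printer` are made-up names:

    // One macro arm handles calls with and without extra arguments.
    macro_rules! visit_all {
        ($visitor:expr, $method:ident, $list:expr $(, $($extra:expr),* )?) => {
            for elem in $list {
                $visitor.$method(elem $(, $($extra,)* )?)
            }
        };
    }

    struct Printer;

    impl Printer {
        fn visit(&mut self, x: &i32) {
            println!("{x}");
        }
        fn visit_with(&mut self, x: &i32, label: &str) {
            println!("{label}: {x}");
        }
    }

    fn main() {
        let mut p = Printer;
        let xs = vec![1, 2, 3];
        visit_all!(p, visit, &xs);              // no extra arguments
        visit_all!(p, visit_with, &xs, "item"); // one extra argument
    }
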
@@ -192,26 +192,13 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
}
}
InlineAsmOperand::Sym { ref sym } => {
if !self.tcx.features().asm_sym {
feature_err(
&sess.parse_sess,
sym::asm_sym,
*op_sp,
"sym operands for inline assembly are unstable",
)
.emit();
}

let static_def_id = self
.resolver
.get_partial_res(sym.id)
.filter(|res| res.unresolved_segments() == 0)
.and_then(|res| {
if let Res::Def(DefKind::Static(_), def_id) = res.base_res() {
Some(def_id)
} else {
None
}
.and_then(|res| res.full_res())
.and_then(|res| match res {
Res::Def(DefKind::Static(_), def_id) => Some(def_id),
_ => None,
});

if let Some(def_id) = static_def_id {

@@ -237,7 +224,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
// Wrap the expression in an AnonConst.
let parent_def_id = self.current_hir_id_owner;
let node_id = self.next_node_id();
self.create_def(parent_def_id, node_id, DefPathData::AnonConst);
self.create_def(parent_def_id.def_id, node_id, DefPathData::AnonConst);
let anon_const = AnonConst { id: node_id, value: P(expr) };
hir::InlineAsmOperand::SymFn {
anon_const: self.lower_anon_const(&anon_const),

@@ -1,4 +1,7 @@
use rustc_errors::{fluent, AddToDiagnostic, Applicability, Diagnostic, DiagnosticArgFromDisplay};
use rustc_errors::{
fluent, AddToDiagnostic, Applicability, Diagnostic, DiagnosticArgFromDisplay,
SubdiagnosticMessage,
};
use rustc_macros::{Diagnostic, Subdiagnostic};
use rustc_span::{symbol::Ident, Span, Symbol};

@@ -19,7 +22,10 @@ pub struct UseAngleBrackets {
}

impl AddToDiagnostic for UseAngleBrackets {
fn add_to_diagnostic(self, diag: &mut Diagnostic) {
fn add_to_diagnostic_with<F>(self, diag: &mut Diagnostic, _: F)
where
F: Fn(&mut Diagnostic, SubdiagnosticMessage) -> SubdiagnosticMessage,
{
diag.multipart_suggestion(
fluent::ast_lowering::use_angle_brackets,
vec![(self.open_param, String::from("<")), (self.close_param, String::from(">"))],

@@ -29,14 +35,28 @@ impl AddToDiagnostic for UseAngleBrackets {
}

#[derive(Diagnostic)]
#[help]
#[diag(ast_lowering::invalid_abi, code = "E0703")]
#[note]
pub struct InvalidAbi {
#[primary_span]
#[label]
pub span: Span,
pub abi: Symbol,
pub valid_abis: String,
pub command: String,
#[subdiagnostic]
pub suggestion: Option<InvalidAbiSuggestion>,
}

#[derive(Subdiagnostic)]
#[suggestion(
ast_lowering::invalid_abi_suggestion,
code = "{suggestion}",
applicability = "maybe-incorrect"
)]
pub struct InvalidAbiSuggestion {
#[primary_span]
pub span: Span,
pub suggestion: String,
}

#[derive(Diagnostic, Clone, Copy)]

@@ -55,7 +75,10 @@ pub enum AssocTyParenthesesSub {
}

impl AddToDiagnostic for AssocTyParenthesesSub {
fn add_to_diagnostic(self, diag: &mut Diagnostic) {
fn add_to_diagnostic_with<F>(self, diag: &mut Diagnostic, _: F)
where
F: Fn(&mut Diagnostic, SubdiagnosticMessage) -> SubdiagnosticMessage,
{
match self {
Self::Empty { parentheses_span } => diag.multipart_suggestion(
fluent::ast_lowering::remove_parentheses,

@@ -359,7 +359,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
let node_id = self.next_node_id();

// Add a definition for the in-band const def.
self.create_def(parent_def_id, node_id, DefPathData::AnonConst);
self.create_def(parent_def_id.def_id, node_id, DefPathData::AnonConst);

let anon_const = AnonConst { id: node_id, value: arg };
generic_args.push(AngleBracketedArg::Arg(GenericArg::Const(anon_const)));

@@ -387,32 +387,58 @@ impl<'hir> LoweringContext<'_, 'hir> {
then: &Block,
else_opt: Option<&Expr>,
) -> hir::ExprKind<'hir> {
let lowered_cond = self.lower_expr(cond);
let new_cond = self.manage_let_cond(lowered_cond);
let lowered_cond = self.lower_cond(cond);
let then_expr = self.lower_block_expr(then);
if let Some(rslt) = else_opt {
hir::ExprKind::If(new_cond, self.arena.alloc(then_expr), Some(self.lower_expr(rslt)))
hir::ExprKind::If(
lowered_cond,
self.arena.alloc(then_expr),
Some(self.lower_expr(rslt)),
)
} else {
hir::ExprKind::If(new_cond, self.arena.alloc(then_expr), None)
hir::ExprKind::If(lowered_cond, self.arena.alloc(then_expr), None)
}
}

// If `cond` kind is `let`, returns `let`. Otherwise, wraps and returns `cond`
// in a temporary block.
fn manage_let_cond(&mut self, cond: &'hir hir::Expr<'hir>) -> &'hir hir::Expr<'hir> {
fn has_let_expr<'hir>(expr: &'hir hir::Expr<'hir>) -> bool {
match expr.kind {
hir::ExprKind::Binary(_, lhs, rhs) => has_let_expr(lhs) || has_let_expr(rhs),
hir::ExprKind::Let(..) => true,
// Lowers a condition (i.e. `cond` in `if cond` or `while cond`), wrapping it in a terminating scope
// so that temporaries created in the condition don't live beyond it.
fn lower_cond(&mut self, cond: &Expr) -> &'hir hir::Expr<'hir> {
fn has_let_expr(expr: &Expr) -> bool {
match &expr.kind {
ExprKind::Binary(_, lhs, rhs) => has_let_expr(lhs) || has_let_expr(rhs),
ExprKind::Let(..) => true,
_ => false,
}
}
if has_let_expr(cond) {
cond
} else {
let reason = DesugaringKind::CondTemporary;
let span_block = self.mark_span_with_reason(reason, cond.span, None);
self.expr_drop_temps(span_block, cond, AttrVec::new())

// We have to take special care for `let` exprs in the condition, e.g. in
// `if let pat = val` or `if foo && let pat = val`, as we _do_ want `val` to live beyond the
// condition in this case.
//
// In order to maintain the drop behavior for the non `let` parts of the condition,
// we still wrap them in terminating scopes, e.g. `if foo && let pat = val` essentially
// gets transformed into `if { let _t = foo; _t } && let pat = val`
match &cond.kind {
ExprKind::Binary(op @ Spanned { node: ast::BinOpKind::And, .. }, lhs, rhs)
if has_let_expr(cond) =>
{
let op = self.lower_binop(*op);
let lhs = self.lower_cond(lhs);
let rhs = self.lower_cond(rhs);

self.arena.alloc(self.expr(
cond.span,
hir::ExprKind::Binary(op, lhs, rhs),
AttrVec::new(),
))
}
ExprKind::Let(..) => self.lower_expr(cond),
_ => {
let cond = self.lower_expr(cond);
let reason = DesugaringKind::CondTemporary;
let span_block = self.mark_span_with_reason(reason, cond.span, None);
self.expr_drop_temps(span_block, cond, AttrVec::new())
}
}
}

@@ -439,14 +465,13 @@ impl<'hir> LoweringContext<'_, 'hir> {
body: &Block,
opt_label: Option<Label>,
) -> hir::ExprKind<'hir> {
let lowered_cond = self.with_loop_condition_scope(|t| t.lower_expr(cond));
let new_cond = self.manage_let_cond(lowered_cond);
let lowered_cond = self.with_loop_condition_scope(|t| t.lower_cond(cond));
let then = self.lower_block_expr(body);
let expr_break = self.expr_break(span, AttrVec::new());
let stmt_break = self.stmt_expr(span, expr_break);
let else_blk = self.block_all(span, arena_vec![self; stmt_break], None);
let else_expr = self.arena.alloc(self.expr_block(else_blk, AttrVec::new()));
let if_kind = hir::ExprKind::If(new_cond, self.arena.alloc(then), Some(else_expr));
let if_kind = hir::ExprKind::If(lowered_cond, self.arena.alloc(then), Some(else_expr));
let if_expr = self.expr(span, if_kind, AttrVec::new());
let block = self.block_expr(self.arena.alloc(if_expr));
let span = self.lower_span(span.with_hi(cond.span.hi()));

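As a rough standalone sketch of the rule spelled out in the comments above (a made-up `Cond` enum stands in for AST expressions; this is not the rustc code), only the non-`let` operands of an `&&` chain are wrapped in a drop-temporaries scope, while `let` operands are lowered unchanged:

    #[derive(Debug)]
    enum Cond {
        Let(String),             // `let pat = val`
        And(Box<Cond>, Box<Cond>),
        Other(String),           // any other boolean expression
    }

    #[derive(Debug)]
    enum Lowered {
        Let(String),
        And(Box<Lowered>, Box<Lowered>),
        DropTemps(String),       // stands in for `expr_drop_temps(...)`
    }

    fn has_let(c: &Cond) -> bool {
        match c {
            Cond::Let(_) => true,
            Cond::And(l, r) => has_let(l) || has_let(r),
            Cond::Other(_) => false,
        }
    }

    fn lower_cond(c: &Cond) -> Lowered {
        match c {
            // `a && let p = v`: recurse so only the non-`let` side gets wrapped.
            Cond::And(l, r) if has_let(c) => {
                Lowered::And(Box::new(lower_cond(l)), Box::new(lower_cond(r)))
            }
            // A `let` keeps its value alive beyond the condition.
            Cond::Let(s) => Lowered::Let(s.clone()),
            // Anything else gets a terminating scope for its temporaries.
            _ => Lowered::DropTemps(format!("{c:?}")),
        }
    }

    fn main() {
        let cond = Cond::And(
            Box::new(Cond::Other("foo()".into())),
            Box::new(Cond::Let("Some(x) = bar()".into())),
        );
        println!("{:?}", lower_cond(&cond));
    }
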
@@ -1044,9 +1069,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
if let ExprKind::Path(qself, path) = &expr.kind {
// Does the path resolve to something disallowed in a tuple struct/variant pattern?
if let Some(partial_res) = self.resolver.get_partial_res(expr.id) {
if partial_res.unresolved_segments() == 0
&& !partial_res.base_res().expected_in_tuple_struct_pat()
{
if let Some(res) = partial_res.full_res() && !res.expected_in_tuple_struct_pat() {
return None;
}
}

@@ -1066,9 +1089,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
if let ExprKind::Path(qself, path) = &expr.kind {
// Does the path resolve to something disallowed in a unit struct/variant pattern?
if let Some(partial_res) = self.resolver.get_partial_res(expr.id) {
if partial_res.unresolved_segments() == 0
&& !partial_res.base_res().expected_in_unit_struct_pat()
{
if let Some(res) = partial_res.full_res() && !res.expected_in_unit_struct_pat() {
return None;
}
}

@@ -1609,11 +1630,11 @@ impl<'hir> LoweringContext<'_, 'hir> {
}

/// Desugar `ExprKind::Yeet` from: `do yeet <expr>` into:
/// ```rust
/// ```ignore(illustrative)
/// // If there is an enclosing `try {...}`:
/// break 'catch_target FromResidual::from_residual(Yeet(residual)),
/// break 'catch_target FromResidual::from_residual(Yeet(residual));
/// // Otherwise:
/// return FromResidual::from_residual(Yeet(residual)),
/// return FromResidual::from_residual(Yeet(residual));
/// ```
/// But to simplify this, there's a `from_yeet` lang item function which
/// handles the combined `FromResidual::from_residual(Yeet(residual))`.

@ -24,7 +24,7 @@ pub(super) struct NodeCollector<'a, 'hir> {
|
|||
/// The parent of this node
|
||||
parent_node: hir::ItemLocalId,
|
||||
|
||||
owner: LocalDefId,
|
||||
owner: OwnerId,
|
||||
|
||||
definitions: &'a definitions::Definitions,
|
||||
}
|
||||
|
@ -81,9 +81,9 @@ impl<'a, 'hir> NodeCollector<'a, 'hir> {
|
|||
current_dep_node_owner={} ({:?}), hir_id.owner={} ({:?})",
|
||||
self.source_map.span_to_diagnostic_string(span),
|
||||
node,
|
||||
self.definitions.def_path(self.owner).to_string_no_crate_verbose(),
|
||||
self.definitions.def_path(self.owner.def_id).to_string_no_crate_verbose(),
|
||||
self.owner,
|
||||
self.definitions.def_path(hir_id.owner).to_string_no_crate_verbose(),
|
||||
self.definitions.def_path(hir_id.owner.def_id).to_string_no_crate_verbose(),
|
||||
hir_id.owner,
|
||||
)
|
||||
}
|
||||
|
@ -112,19 +112,19 @@ impl<'a, 'hir> Visitor<'hir> for NodeCollector<'a, 'hir> {
|
|||
|
||||
fn visit_nested_item(&mut self, item: ItemId) {
|
||||
debug!("visit_nested_item: {:?}", item);
|
||||
self.insert_nested(item.def_id);
|
||||
self.insert_nested(item.def_id.def_id);
|
||||
}
|
||||
|
||||
fn visit_nested_trait_item(&mut self, item_id: TraitItemId) {
|
||||
self.insert_nested(item_id.def_id);
|
||||
self.insert_nested(item_id.def_id.def_id);
|
||||
}
|
||||
|
||||
fn visit_nested_impl_item(&mut self, item_id: ImplItemId) {
|
||||
self.insert_nested(item_id.def_id);
|
||||
self.insert_nested(item_id.def_id.def_id);
|
||||
}
|
||||
|
||||
fn visit_nested_foreign_item(&mut self, foreign_id: ForeignItemId) {
|
||||
self.insert_nested(foreign_id.def_id);
|
||||
self.insert_nested(foreign_id.def_id.def_id);
|
||||
}
|
||||
|
||||
fn visit_nested_body(&mut self, id: BodyId) {
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use super::errors::{InvalidAbi, MisplacedRelaxTraitBound};
|
||||
use super::errors::{InvalidAbi, InvalidAbiSuggestion, MisplacedRelaxTraitBound};
|
||||
use super::ResolverAstLoweringExt;
|
||||
use super::{Arena, AstOwner, ImplTraitContext, ImplTraitPosition};
|
||||
use super::{FnDeclKind, LoweringContext, ParamMode};
|
||||
|
@ -14,9 +14,10 @@ use rustc_hir::def_id::{LocalDefId, CRATE_DEF_ID};
|
|||
use rustc_hir::PredicateOrigin;
|
||||
use rustc_index::vec::{Idx, IndexVec};
|
||||
use rustc_middle::ty::{DefIdTree, ResolverAstLowering, TyCtxt};
|
||||
use rustc_span::lev_distance::find_best_match_for_name;
|
||||
use rustc_span::source_map::DesugaringKind;
|
||||
use rustc_span::symbol::{kw, sym, Ident};
|
||||
use rustc_span::Span;
|
||||
use rustc_span::{Span, Symbol};
|
||||
use rustc_target::spec::abi;
|
||||
use smallvec::{smallvec, SmallVec};
|
||||
|
||||
|
@ -67,7 +68,7 @@ impl<'a, 'hir> ItemLowerer<'a, 'hir> {
|
|||
bodies: Vec::new(),
|
||||
attrs: SortedMap::default(),
|
||||
children: FxHashMap::default(),
|
||||
current_hir_id_owner: CRATE_DEF_ID,
|
||||
current_hir_id_owner: hir::CRATE_OWNER_ID,
|
||||
item_local_id_counter: hir::ItemLocalId::new(0),
|
||||
node_id_to_local_id: Default::default(),
|
||||
local_id_to_def_id: SortedMap::new(),
|
||||
|
@ -176,7 +177,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
}
|
||||
|
||||
pub(super) fn lower_item_ref(&mut self, i: &Item) -> SmallVec<[hir::ItemId; 1]> {
|
||||
let mut node_ids = smallvec![hir::ItemId { def_id: self.local_def_id(i.id) }];
|
||||
let mut node_ids =
|
||||
smallvec![hir::ItemId { def_id: hir::OwnerId { def_id: self.local_def_id(i.id) } }];
|
||||
if let ItemKind::Use(ref use_tree) = &i.kind {
|
||||
self.lower_item_id_use_tree(use_tree, i.id, &mut node_ids);
|
||||
}
|
||||
|
@ -192,7 +194,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
match tree.kind {
|
||||
UseTreeKind::Nested(ref nested_vec) => {
|
||||
for &(ref nested, id) in nested_vec {
|
||||
vec.push(hir::ItemId { def_id: self.local_def_id(id) });
|
||||
vec.push(hir::ItemId {
|
||||
def_id: hir::OwnerId { def_id: self.local_def_id(id) },
|
||||
});
|
||||
self.lower_item_id_use_tree(nested, id, vec);
|
||||
}
|
||||
}
|
||||
|
@ -201,7 +205,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
for (_, &id) in
|
||||
iter::zip(self.expect_full_res_from_use(base_id).skip(1), &[id1, id2])
|
||||
{
|
||||
vec.push(hir::ItemId { def_id: self.local_def_id(id) });
|
||||
vec.push(hir::ItemId {
|
||||
def_id: hir::OwnerId { def_id: self.local_def_id(id) },
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -539,7 +545,11 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
let ident = *ident;
|
||||
let mut path = path.clone();
|
||||
for seg in &mut path.segments {
|
||||
seg.id = self.next_node_id();
|
||||
// Give the cloned segment the same resolution information
|
||||
// as the old one (this is needed for stability checking).
|
||||
let new_id = self.next_node_id();
|
||||
self.resolver.clone_res(seg.id, new_id);
|
||||
seg.id = new_id;
|
||||
}
|
||||
let span = path.span;
|
||||
|
||||
|
@ -552,7 +562,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
}
|
||||
|
||||
let item = hir::Item {
|
||||
def_id: new_id,
|
||||
def_id: hir::OwnerId { def_id: new_id },
|
||||
ident: this.lower_ident(ident),
|
||||
kind,
|
||||
vis_span,
|
||||
|
@ -608,7 +618,11 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
|
||||
// Give the segments new node-ids since they are being cloned.
|
||||
for seg in &mut prefix.segments {
|
||||
seg.id = self.next_node_id();
|
||||
// Give the cloned segment the same resolution information
|
||||
// as the old one (this is needed for stability checking).
|
||||
let new_id = self.next_node_id();
|
||||
self.resolver.clone_res(seg.id, new_id);
|
||||
seg.id = new_id;
|
||||
}
|
||||
|
||||
// Each `use` import is an item and thus are owners of the
|
||||
|
@ -626,7 +640,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
}
|
||||
|
||||
let item = hir::Item {
|
||||
def_id: new_hir_id,
|
||||
def_id: hir::OwnerId { def_id: new_hir_id },
|
||||
ident: this.lower_ident(ident),
|
||||
kind,
|
||||
vis_span,
|
||||
|
@ -688,7 +702,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
|
||||
fn lower_foreign_item_ref(&mut self, i: &ForeignItem) -> hir::ForeignItemRef {
|
||||
hir::ForeignItemRef {
|
||||
id: hir::ForeignItemId { def_id: self.local_def_id(i.id) },
|
||||
id: hir::ForeignItemId { def_id: hir::OwnerId { def_id: self.local_def_id(i.id) } },
|
||||
ident: self.lower_ident(i.ident),
|
||||
span: self.lower_span(i.span),
|
||||
}
|
||||
|
@ -798,7 +812,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
);
|
||||
(generics, hir::TraitItemKind::Fn(sig, hir::TraitFn::Provided(body_id)), true)
|
||||
}
|
||||
AssocItemKind::TyAlias(box TyAlias {
|
||||
AssocItemKind::Type(box TyAlias {
|
||||
ref generics,
|
||||
where_clauses,
|
||||
ref bounds,
|
||||
|
@ -844,13 +858,13 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
fn lower_trait_item_ref(&mut self, i: &AssocItem) -> hir::TraitItemRef {
|
||||
let kind = match &i.kind {
|
||||
AssocItemKind::Const(..) => hir::AssocItemKind::Const,
|
||||
AssocItemKind::TyAlias(..) => hir::AssocItemKind::Type,
|
||||
AssocItemKind::Type(..) => hir::AssocItemKind::Type,
|
||||
AssocItemKind::Fn(box Fn { sig, .. }) => {
|
||||
hir::AssocItemKind::Fn { has_self: sig.decl.has_self() }
|
||||
}
|
||||
AssocItemKind::MacCall(..) => unimplemented!(),
|
||||
};
|
||||
let id = hir::TraitItemId { def_id: self.local_def_id(i.id) };
|
||||
let id = hir::TraitItemId { def_id: hir::OwnerId { def_id: self.local_def_id(i.id) } };
|
||||
hir::TraitItemRef {
|
||||
id,
|
||||
ident: self.lower_ident(i.ident),
|
||||
|
@ -892,7 +906,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
|
||||
(generics, hir::ImplItemKind::Fn(sig, body_id))
|
||||
}
|
||||
AssocItemKind::TyAlias(box TyAlias { generics, where_clauses, ty, .. }) => {
|
||||
AssocItemKind::Type(box TyAlias { generics, where_clauses, ty, .. }) => {
|
||||
let mut generics = generics.clone();
|
||||
add_ty_alias_where_clause(&mut generics, *where_clauses, false);
|
||||
self.lower_generics(
|
||||
|
@ -902,11 +916,11 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
|this| match ty {
|
||||
None => {
|
||||
let ty = this.arena.alloc(this.ty(i.span, hir::TyKind::Err));
|
||||
hir::ImplItemKind::TyAlias(ty)
|
||||
hir::ImplItemKind::Type(ty)
|
||||
}
|
||||
Some(ty) => {
|
||||
let ty = this.lower_ty(ty, &ImplTraitContext::TypeAliasesOpaqueTy);
|
||||
hir::ImplItemKind::TyAlias(ty)
|
||||
hir::ImplItemKind::Type(ty)
|
||||
}
|
||||
},
|
||||
)
|
||||
|
@ -930,18 +944,21 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
|
||||
fn lower_impl_item_ref(&mut self, i: &AssocItem) -> hir::ImplItemRef {
|
||||
hir::ImplItemRef {
|
||||
id: hir::ImplItemId { def_id: self.local_def_id(i.id) },
|
||||
id: hir::ImplItemId { def_id: hir::OwnerId { def_id: self.local_def_id(i.id) } },
|
||||
ident: self.lower_ident(i.ident),
|
||||
span: self.lower_span(i.span),
|
||||
kind: match &i.kind {
|
||||
AssocItemKind::Const(..) => hir::AssocItemKind::Const,
|
||||
AssocItemKind::TyAlias(..) => hir::AssocItemKind::Type,
|
||||
AssocItemKind::Type(..) => hir::AssocItemKind::Type,
|
||||
AssocItemKind::Fn(box Fn { sig, .. }) => {
|
||||
hir::AssocItemKind::Fn { has_self: sig.decl.has_self() }
|
||||
}
|
||||
AssocItemKind::MacCall(..) => unimplemented!(),
|
||||
},
|
||||
trait_item_def_id: self.resolver.get_partial_res(i.id).map(|r| r.base_res().def_id()),
|
||||
trait_item_def_id: self
|
||||
.resolver
|
||||
.get_partial_res(i.id)
|
||||
.map(|r| r.expect_full_res().def_id()),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1049,9 +1066,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
asyncness: Async,
|
||||
body: Option<&Block>,
|
||||
) -> hir::BodyId {
|
||||
let closure_id = match asyncness {
|
||||
Async::Yes { closure_id, .. } => closure_id,
|
||||
Async::No => return self.lower_fn_body_block(span, decl, body),
|
||||
let (closure_id, body) = match (asyncness, body) {
|
||||
(Async::Yes { closure_id, .. }, Some(body)) => (closure_id, body),
|
||||
_ => return self.lower_fn_body_block(span, decl, body),
|
||||
};
|
||||
|
||||
self.lower_body(|this| {
|
||||
|
@ -1193,16 +1210,15 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
parameters.push(new_parameter);
|
||||
}
|
||||
|
||||
let body_span = body.map_or(span, |b| b.span);
|
||||
let async_expr = this.make_async_expr(
|
||||
CaptureBy::Value,
|
||||
closure_id,
|
||||
None,
|
||||
body_span,
|
||||
body.span,
|
||||
hir::AsyncGeneratorKind::Fn,
|
||||
|this| {
|
||||
// Create a block from the user's function body:
|
||||
let user_body = this.lower_block_expr_opt(body_span, body);
|
||||
let user_body = this.lower_block_expr(body);
|
||||
|
||||
// Transform into `drop-temps { <user-body> }`, an expression:
|
||||
let desugared_span =
|
||||
|
@ -1234,7 +1250,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
|
||||
(
|
||||
this.arena.alloc_from_iter(parameters),
|
||||
this.expr(body_span, async_expr, AttrVec::new()),
|
||||
this.expr(body.span, async_expr, AttrVec::new()),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
@ -1280,10 +1296,19 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
}
|
||||
|
||||
fn error_on_invalid_abi(&self, abi: StrLit) {
|
||||
let abi_names = abi::enabled_names(self.tcx.features(), abi.span)
|
||||
.iter()
|
||||
.map(|s| Symbol::intern(s))
|
||||
.collect::<Vec<_>>();
|
||||
let suggested_name = find_best_match_for_name(&abi_names, abi.symbol_unescaped, None);
|
||||
self.tcx.sess.emit_err(InvalidAbi {
|
||||
abi: abi.symbol_unescaped,
|
||||
span: abi.span,
|
||||
abi: abi.symbol,
|
||||
valid_abis: abi::all_names().join(", "),
|
||||
suggestion: suggested_name.map(|suggested_name| InvalidAbiSuggestion {
|
||||
span: abi.span,
|
||||
suggestion: format!("\"{suggested_name}\""),
|
||||
}),
|
||||
command: "rustc --print=calling-conventions".to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -1335,9 +1360,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
match self
|
||||
.resolver
|
||||
.get_partial_res(bound_pred.bounded_ty.id)
|
||||
.map(|d| (d.base_res(), d.unresolved_segments()))
|
||||
.and_then(|r| r.full_res())
|
||||
{
|
||||
Some((Res::Def(DefKind::TyParam, def_id), 0))
|
||||
Some(Res::Def(DefKind::TyParam, def_id))
|
||||
if bound_pred.bound_generic_params.is_empty() =>
|
||||
{
|
||||
generics
|
||||
|
@ -1464,6 +1489,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
let bounded_ty =
|
||||
self.ty_path(ty_id, param_span, hir::QPath::Resolved(None, ty_path));
|
||||
Some(hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate {
|
||||
hir_id: self.next_id(),
|
||||
bounded_ty: self.arena.alloc(bounded_ty),
|
||||
bounds,
|
||||
span,
|
||||
|
@ -1494,6 +1520,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
ref bounds,
|
||||
span,
|
||||
}) => hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate {
|
||||
hir_id: self.next_id(),
|
||||
bound_generic_params: self.lower_generic_params(bound_generic_params),
|
||||
bounded_ty: self
|
||||
.lower_ty(bounded_ty, &ImplTraitContext::Disallowed(ImplTraitPosition::Type)),
|
||||
|
|
|
@ -32,7 +32,6 @@
|
|||
|
||||
#![feature(box_patterns)]
|
||||
#![feature(let_chains)]
|
||||
#![cfg_attr(bootstrap, feature(let_else))]
|
||||
#![feature(never_type)]
|
||||
#![recursion_limit = "256"]
|
||||
#![allow(rustc::potential_query_instability)]
|
||||
|
@ -62,8 +61,8 @@ use rustc_hir::def_id::{LocalDefId, CRATE_DEF_ID};
|
|||
use rustc_hir::definitions::DefPathData;
|
||||
use rustc_hir::{ConstArg, GenericArg, ItemLocalId, ParamName, TraitCandidate};
|
||||
use rustc_index::vec::{Idx, IndexVec};
|
||||
use rustc_middle::span_bug;
|
||||
use rustc_middle::ty::{ResolverAstLowering, TyCtxt};
|
||||
use rustc_middle::{bug, span_bug};
|
||||
use rustc_session::parse::feature_err;
|
||||
use rustc_span::hygiene::MacroKind;
|
||||
use rustc_span::source_map::DesugaringKind;
|
||||
|
@ -126,7 +125,7 @@ struct LoweringContext<'a, 'hir> {
|
|||
is_in_trait_impl: bool,
|
||||
is_in_dyn_type: bool,
|
||||
|
||||
current_hir_id_owner: LocalDefId,
|
||||
current_hir_id_owner: hir::OwnerId,
|
||||
item_local_id_counter: hir::ItemLocalId,
|
||||
local_id_to_def_id: SortedMap<ItemLocalId, LocalDefId>,
|
||||
trait_map: FxHashMap<ItemLocalId, Box<[TraitCandidate]>>,
|
||||
|
@ -161,6 +160,10 @@ trait ResolverAstLoweringExt {
|
|||
fn legacy_const_generic_args(&self, expr: &Expr) -> Option<Vec<usize>>;
|
||||
fn get_partial_res(&self, id: NodeId) -> Option<PartialRes>;
|
||||
fn get_import_res(&self, id: NodeId) -> PerNS<Option<Res<NodeId>>>;
|
||||
// Clones the resolution (if any) on 'source' and applies it
|
||||
// to 'target'. Used when desugaring a `UseTreeKind::Nested` to
|
||||
// multiple `UseTreeKind::Simple`s
|
||||
fn clone_res(&mut self, source: NodeId, target: NodeId);
|
||||
fn get_label_res(&self, id: NodeId) -> Option<NodeId>;
|
||||
fn get_lifetime_res(&self, id: NodeId) -> Option<LifetimeRes>;
|
||||
fn take_extra_lifetime_params(&mut self, id: NodeId) -> Vec<(Ident, NodeId, LifetimeRes)>;
|
||||
|
@ -176,12 +179,7 @@ impl ResolverAstLoweringExt for ResolverAstLowering {
|
|||
return None;
|
||||
}
|
||||
|
||||
let partial_res = self.partial_res_map.get(&expr.id)?;
|
||||
if partial_res.unresolved_segments() != 0 {
|
||||
return None;
|
||||
}
|
||||
|
||||
if let Res::Def(DefKind::Fn, def_id) = partial_res.base_res() {
|
||||
if let Res::Def(DefKind::Fn, def_id) = self.partial_res_map.get(&expr.id)?.full_res()? {
|
||||
// We only support cross-crate argument rewriting. Uses
|
||||
// within the same crate should be updated to use the new
|
||||
// const generics style.
|
||||
|
@ -198,6 +196,12 @@ impl ResolverAstLoweringExt for ResolverAstLowering {
|
|||
None
|
||||
}
|
||||
|
||||
fn clone_res(&mut self, source: NodeId, target: NodeId) {
|
||||
if let Some(res) = self.partial_res_map.get(&source) {
|
||||
self.partial_res_map.insert(target, *res);
|
||||
}
|
||||
}
|
||||
|
||||
/// Obtains resolution for a `NodeId` with a single resolution.
|
||||
fn get_partial_res(&self, id: NodeId) -> Option<PartialRes> {
|
||||
self.partial_res_map.get(&id).copied()
|
||||
|
@ -502,6 +506,17 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
start
|
||||
}
|
||||
|
||||
/// Given the id of some node in the AST, finds the `LocalDefId` associated with it by the name
|
||||
/// resolver (if any).
|
||||
fn orig_opt_local_def_id(&self, node: NodeId) -> Option<LocalDefId> {
|
||||
self.resolver.node_id_to_def_id.get(&node).map(|local_def_id| *local_def_id)
|
||||
}
|
||||
|
||||
fn orig_local_def_id(&self, node: NodeId) -> LocalDefId {
|
||||
self.orig_opt_local_def_id(node)
|
||||
.unwrap_or_else(|| panic!("no entry for node id: `{:?}`", node))
|
||||
}
|
||||
|
||||
/// Given the id of some node in the AST, finds the `LocalDefId` associated with it by the name
|
||||
/// resolver (if any), after applying any remapping from `get_remapped_def_id`.
|
||||
///
|
||||
|
@ -516,10 +531,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
/// we would create an opaque type `type FooReturn<'a1> = impl Debug + 'a1`.
|
||||
/// When lowering the `Debug + 'a` bounds, we add a remapping to map `'a` to `'a1`.
|
||||
fn opt_local_def_id(&self, node: NodeId) -> Option<LocalDefId> {
|
||||
self.resolver
|
||||
.node_id_to_def_id
|
||||
.get(&node)
|
||||
.map(|local_def_id| self.get_remapped_def_id(*local_def_id))
|
||||
self.orig_opt_local_def_id(node).map(|local_def_id| self.get_remapped_def_id(local_def_id))
|
||||
}
|
||||
|
||||
fn local_def_id(&self, node: NodeId) -> LocalDefId {
|
||||
|
@@ -528,9 +540,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {

/// Get the previously recorded `to` local def id given the `from` local def id, obtained using
/// `generics_def_id_map` field.
fn get_remapped_def_id(&self, mut local_def_id: LocalDefId) -> LocalDefId {
fn get_remapped_def_id(&self, local_def_id: LocalDefId) -> LocalDefId {
// `generics_def_id_map` is a stack of mappings. As we go deeper in impl traits nesting we
// push new mappings so we need to try first the latest mappings, hence `iter().rev()`.
// push new mappings, so we first need to get the latest (innermost) mappings, hence `iter().rev()`.
//
// Consider:
//

@@ -540,18 +552,13 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
//
// `[[fn#'b -> impl_trait#'b], [fn#'b -> impl_sized#'b]]`
//
// for the opaque type generated on `impl Sized + 'b`, We want the result to be:
// impl_sized#'b, so iterating forward is the wrong thing to do.
for map in self.generics_def_id_map.iter().rev() {
if let Some(r) = map.get(&local_def_id) {
debug!("def_id_remapper: remapping from `{local_def_id:?}` to `{r:?}`");
local_def_id = *r;
} else {
debug!("def_id_remapper: no remapping for `{local_def_id:?}` found in map");
}
}

local_def_id
// for the opaque type generated on `impl Sized + 'b`, we want the result to be: impl_sized#'b.
// So, if we were to search from the start (outermost), we would get the wrong result, impl_trait#'b.
self.generics_def_id_map
.iter()
.rev()
.find_map(|map| map.get(&local_def_id).map(|local_def_id| *local_def_id))
.unwrap_or(local_def_id)
}

/// Freshen the `LoweringContext` and ready it to lower a nested item.

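A small self-contained sketch of the innermost-first lookup above, with plain `HashMap`s standing in for the stack of remapping tables (toy names, not the rustc types):

    use std::collections::HashMap;

    // The remapping tables form a stack, so the most recently pushed table wins.
    fn remap(stack: &[HashMap<u32, u32>], id: u32) -> u32 {
        stack
            .iter()
            .rev() // innermost (most recently pushed) mappings first
            .find_map(|map| map.get(&id).copied())
            .unwrap_or(id) // no remapping recorded: keep the original id
    }

    fn main() {
        let outer = HashMap::from([(1, 10)]);
        let inner = HashMap::from([(1, 20)]);
        let stack = vec![outer, inner];
        assert_eq!(remap(&stack, 1), 20); // the inner mapping shadows the outer one
        assert_eq!(remap(&stack, 7), 7);  // unknown ids pass through unchanged
    }
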
@ -572,7 +579,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
let current_node_ids = std::mem::take(&mut self.node_id_to_local_id);
|
||||
let current_id_to_def_id = std::mem::take(&mut self.local_id_to_def_id);
|
||||
let current_trait_map = std::mem::take(&mut self.trait_map);
|
||||
let current_owner = std::mem::replace(&mut self.current_hir_id_owner, def_id);
|
||||
let current_owner =
|
||||
std::mem::replace(&mut self.current_hir_id_owner, hir::OwnerId { def_id });
|
||||
let current_local_counter =
|
||||
std::mem::replace(&mut self.item_local_id_counter, hir::ItemLocalId::new(1));
|
||||
let current_impl_trait_defs = std::mem::take(&mut self.impl_trait_defs);
|
||||
|
@ -587,7 +595,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
debug_assert_eq!(_old, None);
|
||||
|
||||
let item = f(self);
|
||||
debug_assert_eq!(def_id, item.def_id());
|
||||
debug_assert_eq!(def_id, item.def_id().def_id);
|
||||
// `f` should have consumed all the elements in these vectors when constructing `item`.
|
||||
debug_assert!(self.impl_trait_defs.is_empty());
|
||||
debug_assert!(self.impl_trait_bounds.is_empty());
|
||||
|
@ -753,12 +761,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
}
|
||||
|
||||
fn expect_full_res(&mut self, id: NodeId) -> Res<NodeId> {
|
||||
self.resolver.get_partial_res(id).map_or(Res::Err, |pr| {
|
||||
if pr.unresolved_segments() != 0 {
|
||||
panic!("path not fully resolved: {:?}", pr);
|
||||
}
|
||||
pr.base_res()
|
||||
})
|
||||
self.resolver.get_partial_res(id).map_or(Res::Err, |pr| pr.expect_full_res())
|
||||
}
|
||||
|
||||
fn expect_full_res_from_use(&mut self, id: NodeId) -> impl Iterator<Item = Res<NodeId>> {
|
||||
|
@ -786,7 +789,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
/// Mark a span as relative to the current owning item.
|
||||
fn lower_span(&self, span: Span) -> Span {
|
||||
if self.tcx.sess.opts.unstable_opts.incremental_relative_spans {
|
||||
span.with_parent(Some(self.current_hir_id_owner))
|
||||
span.with_parent(Some(self.current_hir_id_owner.def_id))
|
||||
} else {
|
||||
// Do not make spans relative when not using incremental compilation.
|
||||
span
|
||||
|
@ -812,7 +815,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
LifetimeRes::Fresh { param, .. } => {
|
||||
// Late resolution delegates to us the creation of the `LocalDefId`.
|
||||
let _def_id = self.create_def(
|
||||
self.current_hir_id_owner,
|
||||
self.current_hir_id_owner.def_id,
|
||||
param,
|
||||
DefPathData::LifetimeNs(kw::UnderscoreLifetime),
|
||||
);
|
||||
|
@ -1060,9 +1063,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
// Desugar `AssocTy: Bounds` into `AssocTy = impl Bounds`. We do this by
|
||||
// constructing the HIR for `impl bounds...` and then lowering that.
|
||||
|
||||
let parent_def_id = self.current_hir_id_owner;
|
||||
let impl_trait_node_id = self.next_node_id();
|
||||
self.create_def(parent_def_id, impl_trait_node_id, DefPathData::ImplTrait);
|
||||
|
||||
self.with_dyn_type_scope(false, |this| {
|
||||
let node_id = this.next_node_id();
|
||||
|
@ -1140,8 +1141,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
// type and value namespaces. If we resolved the path in the value namespace, we
|
||||
// transform it into a generic const argument.
|
||||
TyKind::Path(ref qself, ref path) => {
|
||||
if let Some(partial_res) = self.resolver.get_partial_res(ty.id) {
|
||||
let res = partial_res.base_res();
|
||||
if let Some(res) = self
|
||||
.resolver
|
||||
.get_partial_res(ty.id)
|
||||
.and_then(|partial_res| partial_res.full_res())
|
||||
{
|
||||
if !res.matches_ns(Namespace::TypeNS) {
|
||||
debug!(
|
||||
"lower_generic_arg: Lowering type argument as const argument: {:?}",
|
||||
|
@ -1154,7 +1158,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
let node_id = self.next_node_id();
|
||||
|
||||
// Add a definition for the in-band const def.
|
||||
self.create_def(parent_def_id, node_id, DefPathData::AnonConst);
|
||||
self.create_def(
|
||||
parent_def_id.def_id,
|
||||
node_id,
|
||||
DefPathData::AnonConst,
|
||||
);
|
||||
|
||||
let span = self.lower_span(ty.span);
|
||||
let path_expr = Expr {
|
||||
|
@ -1204,8 +1212,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
// by `ty_path`.
|
||||
if qself.is_none()
|
||||
&& let Some(partial_res) = self.resolver.get_partial_res(t.id)
|
||||
&& partial_res.unresolved_segments() == 0
|
||||
&& let Res::Def(DefKind::Trait | DefKind::TraitAlias, _) = partial_res.base_res()
|
||||
&& let Some(Res::Def(DefKind::Trait | DefKind::TraitAlias, _)) = partial_res.full_res()
|
||||
{
|
||||
let (bounds, lifetime_bound) = self.with_dyn_type_scope(true, |this| {
|
||||
let poly_trait_ref = this.ast_arena.ptr.alloc(PolyTraitRef {
|
||||
|
@ -1349,9 +1356,14 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
def_node_id,
|
||||
bounds,
|
||||
false,
|
||||
&ImplTraitContext::TypeAliasesOpaqueTy,
|
||||
itctx,
|
||||
),
|
||||
ImplTraitContext::Universal => {
|
||||
self.create_def(
|
||||
self.current_hir_id_owner.def_id,
|
||||
def_node_id,
|
||||
DefPathData::ImplTrait,
|
||||
);
|
||||
let span = t.span;
|
||||
let ident = Ident::from_str_and_span(&pprust::ty_to_string(t), span);
|
||||
let (param, bounds, path) =
|
||||
|
@ -1445,7 +1457,17 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
// frequently opened issues show.
|
||||
let opaque_ty_span = self.mark_span_with_reason(DesugaringKind::OpaqueTy, span, None);
|
||||
|
||||
let opaque_ty_def_id = self.local_def_id(opaque_ty_node_id);
|
||||
let opaque_ty_def_id = match origin {
|
||||
hir::OpaqueTyOrigin::TyAlias => self.create_def(
|
||||
self.current_hir_id_owner.def_id,
|
||||
opaque_ty_node_id,
|
||||
DefPathData::ImplTrait,
|
||||
),
|
||||
hir::OpaqueTyOrigin::FnReturn(fn_def_id) => {
|
||||
self.create_def(fn_def_id, opaque_ty_node_id, DefPathData::ImplTrait)
|
||||
}
|
||||
hir::OpaqueTyOrigin::AsyncFn(..) => bug!("unreachable"),
|
||||
};
|
||||
debug!(?opaque_ty_def_id);
|
||||
|
||||
// Contains the new lifetime definitions created for the TAIT (if any).
|
||||
|
@ -1551,7 +1573,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
debug!(?lifetimes);
|
||||
|
||||
// `impl Trait` now just becomes `Foo<'a, 'b, ..>`.
|
||||
hir::TyKind::OpaqueDef(hir::ItemId { def_id: opaque_ty_def_id }, lifetimes, in_trait)
|
||||
hir::TyKind::OpaqueDef(
|
||||
hir::ItemId { def_id: hir::OwnerId { def_id: opaque_ty_def_id } },
|
||||
lifetimes,
|
||||
in_trait,
|
||||
)
|
||||
}
|
||||
|
||||
/// Registers a new opaque type with the proper `NodeId`s and
|
||||
|
@ -1567,7 +1593,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
// Generate an `type Foo = impl Trait;` declaration.
|
||||
trace!("registering opaque type with id {:#?}", opaque_ty_id);
|
||||
let opaque_ty_item = hir::Item {
|
||||
def_id: opaque_ty_id,
|
||||
def_id: hir::OwnerId { def_id: opaque_ty_id },
|
||||
ident: Ident::empty(),
|
||||
kind: opaque_ty_item_kind,
|
||||
vis_span: self.lower_span(span.shrink_to_lo()),
|
||||
|
@ -1610,7 +1636,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
|
||||
LifetimeRes::Fresh { param, binder: _ } => {
|
||||
debug_assert_eq!(lifetime.ident.name, kw::UnderscoreLifetime);
|
||||
if let Some(old_def_id) = self.opt_local_def_id(param) && remapping.get(&old_def_id).is_none() {
|
||||
if let Some(old_def_id) = self.orig_opt_local_def_id(param) && remapping.get(&old_def_id).is_none() {
|
||||
let node_id = self.next_node_id();
|
||||
|
||||
let new_def_id = self.create_def(
|
||||
|
@ -1855,7 +1881,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
let extra_lifetime_params = self.resolver.take_extra_lifetime_params(opaque_ty_node_id);
|
||||
debug!(?extra_lifetime_params);
|
||||
for (ident, outer_node_id, outer_res) in extra_lifetime_params {
|
||||
let outer_def_id = self.local_def_id(outer_node_id);
|
||||
let outer_def_id = self.orig_local_def_id(outer_node_id);
|
||||
let inner_node_id = self.next_node_id();
|
||||
|
||||
// Add a definition for the in scope lifetime def.
|
||||
|
@ -2018,7 +2044,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
// async fn, so the *type parameters* are inherited. It's
|
||||
// only the lifetime parameters that we must supply.
|
||||
let opaque_ty_ref = hir::TyKind::OpaqueDef(
|
||||
hir::ItemId { def_id: opaque_ty_def_id },
|
||||
hir::ItemId { def_id: hir::OwnerId { def_id: opaque_ty_def_id } },
|
||||
generic_args,
|
||||
in_trait,
|
||||
);
|
||||
|
|
|
@ -239,7 +239,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
ident: Ident,
|
||||
lower_sub: impl FnOnce(&mut Self) -> Option<&'hir hir::Pat<'hir>>,
|
||||
) -> hir::PatKind<'hir> {
|
||||
match self.resolver.get_partial_res(p.id).map(|d| d.base_res()) {
|
||||
match self.resolver.get_partial_res(p.id).map(|d| d.expect_full_res()) {
|
||||
// `None` can occur in body-less function signatures
|
||||
res @ (None | Some(Res::Local(_))) => {
|
||||
let canonical_id = match res {
|
||||
|
|
|
@ -29,11 +29,13 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
|
||||
let partial_res =
|
||||
self.resolver.get_partial_res(id).unwrap_or_else(|| PartialRes::new(Res::Err));
|
||||
let base_res = partial_res.base_res();
|
||||
let unresolved_segments = partial_res.unresolved_segments();
|
||||
|
||||
let path_span_lo = p.span.shrink_to_lo();
|
||||
let proj_start = p.segments.len() - partial_res.unresolved_segments();
|
||||
let proj_start = p.segments.len() - unresolved_segments;
|
||||
let path = self.arena.alloc(hir::Path {
|
||||
res: self.lower_res(partial_res.base_res()),
|
||||
res: self.lower_res(base_res),
|
||||
segments: self.arena.alloc_from_iter(p.segments[..proj_start].iter().enumerate().map(
|
||||
|(i, segment)| {
|
||||
let param_mode = match (qself_position, param_mode) {
|
||||
|
@ -46,7 +48,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
_ => param_mode,
|
||||
};
|
||||
|
||||
let parenthesized_generic_args = match partial_res.base_res() {
|
||||
let parenthesized_generic_args = match base_res {
|
||||
// `a::b::Trait(Args)`
|
||||
Res::Def(DefKind::Trait, _) if i + 1 == proj_start => {
|
||||
ParenthesizedGenericArgs::Ok
|
||||
|
@ -83,7 +85,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
|
||||
// Simple case, either no projections, or only fully-qualified.
|
||||
// E.g., `std::mem::size_of` or `<I as Iterator>::Item`.
|
||||
if partial_res.unresolved_segments() == 0 {
|
||||
if unresolved_segments == 0 {
|
||||
return hir::QPath::Resolved(qself, path);
|
||||
}
|
||||
|
||||
|
|
|
@ -38,6 +38,13 @@ enum SelfSemantic {
|
|||
No,
|
||||
}
|
||||
|
||||
/// What is the context that prevents using `~const`?
|
||||
enum DisallowTildeConstContext<'a> {
|
||||
TraitObject,
|
||||
ImplTrait,
|
||||
Fn(FnKind<'a>),
|
||||
}
|
||||
|
||||
struct AstValidator<'a> {
|
||||
session: &'a Session,
|
||||
|
||||
|
@ -56,7 +63,7 @@ struct AstValidator<'a> {
|
|||
/// e.g., `impl Iterator<Item = impl Debug>`.
|
||||
outer_impl_trait: Option<Span>,
|
||||
|
||||
is_tilde_const_allowed: bool,
|
||||
disallow_tilde_const: Option<DisallowTildeConstContext<'a>>,
|
||||
|
||||
/// Used to ban `impl Trait` in path projections like `<impl Iterator>::Item`
|
||||
/// or `Foo::Bar<impl Trait>`
|
||||
|
@ -93,18 +100,26 @@ impl<'a> AstValidator<'a> {
|
|||
self.is_impl_trait_banned = old;
|
||||
}
|
||||
|
||||
fn with_tilde_const(&mut self, allowed: bool, f: impl FnOnce(&mut Self)) {
|
||||
let old = mem::replace(&mut self.is_tilde_const_allowed, allowed);
|
||||
fn with_tilde_const(
|
||||
&mut self,
|
||||
disallowed: Option<DisallowTildeConstContext<'a>>,
|
||||
f: impl FnOnce(&mut Self),
|
||||
) {
|
||||
let old = mem::replace(&mut self.disallow_tilde_const, disallowed);
|
||||
f(self);
|
||||
self.is_tilde_const_allowed = old;
|
||||
self.disallow_tilde_const = old;
|
||||
}
|
||||
|
||||
fn with_tilde_const_allowed(&mut self, f: impl FnOnce(&mut Self)) {
|
||||
self.with_tilde_const(true, f)
|
||||
self.with_tilde_const(None, f)
|
||||
}
|
||||
|
||||
fn with_banned_tilde_const(&mut self, f: impl FnOnce(&mut Self)) {
|
||||
self.with_tilde_const(false, f)
|
||||
fn with_banned_tilde_const(
|
||||
&mut self,
|
||||
ctx: DisallowTildeConstContext<'a>,
|
||||
f: impl FnOnce(&mut Self),
|
||||
) {
|
||||
self.with_tilde_const(Some(ctx), f)
|
||||
}
|
||||
|
||||
fn with_let_management(
|
||||
|
@ -172,7 +187,7 @@ impl<'a> AstValidator<'a> {
|
|||
fn with_impl_trait(&mut self, outer: Option<Span>, f: impl FnOnce(&mut Self)) {
|
||||
let old = mem::replace(&mut self.outer_impl_trait, outer);
|
||||
if outer.is_some() {
|
||||
self.with_banned_tilde_const(f);
|
||||
self.with_banned_tilde_const(DisallowTildeConstContext::ImplTrait, f);
|
||||
} else {
|
||||
f(self);
|
||||
}
|
||||
|
@ -197,7 +212,10 @@ impl<'a> AstValidator<'a> {
|
|||
TyKind::ImplTrait(..) => {
|
||||
self.with_impl_trait(Some(t.span), |this| visit::walk_ty(this, t))
|
||||
}
|
||||
TyKind::TraitObject(..) => self.with_banned_tilde_const(|this| visit::walk_ty(this, t)),
|
||||
TyKind::TraitObject(..) => self
|
||||
.with_banned_tilde_const(DisallowTildeConstContext::TraitObject, |this| {
|
||||
visit::walk_ty(this, t)
|
||||
}),
|
||||
TyKind::Path(ref qself, ref path) => {
|
||||
// We allow these:
|
||||
// - `Option<impl Trait>`
|
||||
|
@ -1411,13 +1429,15 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
|
|||
);
|
||||
err.emit();
|
||||
}
|
||||
(_, TraitBoundModifier::MaybeConst) => {
|
||||
if !self.is_tilde_const_allowed {
|
||||
self.err_handler()
|
||||
.struct_span_err(bound.span(), "`~const` is not allowed here")
|
||||
.note("only allowed on bounds on traits' associated types and functions, const fns, const impls and its associated functions")
|
||||
.emit();
|
||||
}
|
||||
(_, TraitBoundModifier::MaybeConst) if let Some(reason) = &self.disallow_tilde_const => {
|
||||
let mut err = self.err_handler().struct_span_err(bound.span(), "`~const` is not allowed here");
|
||||
match reason {
|
||||
DisallowTildeConstContext::TraitObject => err.note("trait objects cannot have `~const` trait bounds"),
|
||||
DisallowTildeConstContext::ImplTrait => err.note("`impl Trait`s cannot have `~const` trait bounds"),
|
||||
DisallowTildeConstContext::Fn(FnKind::Closure(..)) => err.note("closures cannot have `~const` trait bounds"),
|
||||
DisallowTildeConstContext::Fn(FnKind::Fn(_, ident, ..)) => err.span_note(ident.span, "this function is not `const`, so it cannot have `~const` trait bounds"),
|
||||
};
|
||||
err.emit();
|
||||
}
|
||||
(_, TraitBoundModifier::MaybeConstMaybe) => {
|
||||
self.err_handler()
|
||||
|
@ -1524,10 +1544,12 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
|
|||
}
|
||||
|
||||
let tilde_const_allowed =
|
||||
matches!(fk.header(), Some(FnHeader { constness: Const::Yes(_), .. }))
|
||||
matches!(fk.header(), Some(FnHeader { constness: ast::Const::Yes(_), .. }))
|
||||
|| matches!(fk.ctxt(), Some(FnCtxt::Assoc(_)));
|
||||
|
||||
self.with_tilde_const(tilde_const_allowed, |this| visit::walk_fn(this, fk));
|
||||
let disallowed = (!tilde_const_allowed).then(|| DisallowTildeConstContext::Fn(fk));
|
||||
|
||||
self.with_tilde_const(disallowed, |this| visit::walk_fn(this, fk));
|
||||
}
|
||||
|
||||
fn visit_assoc_item(&mut self, item: &'a AssocItem, ctxt: AssocCtxt) {
|
||||
|
@ -1557,7 +1579,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
|
|||
});
|
||||
}
|
||||
}
|
||||
AssocItemKind::TyAlias(box TyAlias {
|
||||
AssocItemKind::Type(box TyAlias {
|
||||
generics,
|
||||
where_clauses,
|
||||
where_predicates_split,
|
||||
|
@ -1596,7 +1618,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
|
|||
}
|
||||
|
||||
match item.kind {
|
||||
AssocItemKind::TyAlias(box TyAlias { ref generics, ref bounds, ref ty, .. })
|
||||
AssocItemKind::Type(box TyAlias { ref generics, ref bounds, ref ty, .. })
|
||||
if ctxt == AssocCtxt::Trait =>
|
||||
{
|
||||
self.visit_vis(&item.vis);
|
||||
|
@ -1771,7 +1793,7 @@ pub fn check_crate(session: &Session, krate: &Crate, lints: &mut LintBuffer) ->
|
|||
in_const_trait_impl: false,
|
||||
has_proc_macro_decls: false,
|
||||
outer_impl_trait: None,
|
||||
is_tilde_const_allowed: false,
|
||||
disallow_tilde_const: None,
|
||||
is_impl_trait_banned: false,
|
||||
is_assoc_ty_bound_banned: false,
|
||||
forbidden_let_reason: Some(ForbiddenLetReason::GenericForbidden),
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
//! Errors emitted by ast_passes.
|
||||
|
||||
use rustc_errors::{fluent, AddToDiagnostic, Applicability, Diagnostic};
|
||||
use rustc_errors::{fluent, AddToDiagnostic, Applicability, Diagnostic, SubdiagnosticMessage};
|
||||
use rustc_macros::{Diagnostic, Subdiagnostic};
|
||||
use rustc_span::{Span, Symbol};
|
||||
|
||||
|
@ -17,7 +17,10 @@ pub struct ForbiddenLet {
|
|||
}
|
||||
|
||||
impl AddToDiagnostic for ForbiddenLetReason {
|
||||
fn add_to_diagnostic(self, diag: &mut Diagnostic) {
|
||||
fn add_to_diagnostic_with<F>(self, diag: &mut Diagnostic, _: F)
|
||||
where
|
||||
F: Fn(&mut Diagnostic, SubdiagnosticMessage) -> SubdiagnosticMessage,
|
||||
{
|
||||
match self {
|
||||
Self::GenericForbidden => {}
|
||||
Self::NotSupportedOr(span) => {
|
||||
|
@ -228,7 +231,10 @@ pub struct ExternBlockSuggestion {
|
|||
}
|
||||
|
||||
impl AddToDiagnostic for ExternBlockSuggestion {
|
||||
fn add_to_diagnostic(self, diag: &mut Diagnostic) {
|
||||
fn add_to_diagnostic_with<F>(self, diag: &mut Diagnostic, _: F)
|
||||
where
|
||||
F: Fn(&mut Diagnostic, SubdiagnosticMessage) -> SubdiagnosticMessage,
|
||||
{
|
||||
let start_suggestion = if let Some(abi) = self.abi {
|
||||
format!("extern \"{}\" {{", abi)
|
||||
} else {
|
||||
|
|
|
@ -3,13 +3,13 @@ use rustc_ast::visit::{self, AssocCtxt, FnCtxt, FnKind, Visitor};
|
|||
use rustc_ast::{AssocConstraint, AssocConstraintKind, NodeId};
|
||||
use rustc_ast::{PatKind, RangeEnd, VariantData};
|
||||
use rustc_errors::{struct_span_err, Applicability, StashKey};
|
||||
use rustc_feature::Features;
|
||||
use rustc_feature::{AttributeGate, BuiltinAttribute, BUILTIN_ATTRIBUTE_MAP};
|
||||
use rustc_session::parse::{feature_err, feature_warn};
|
||||
use rustc_feature::{AttributeGate, BuiltinAttribute, Features, GateIssue, BUILTIN_ATTRIBUTE_MAP};
|
||||
use rustc_session::parse::{feature_err, feature_err_issue, feature_warn};
|
||||
use rustc_session::Session;
|
||||
use rustc_span::source_map::Spanned;
|
||||
use rustc_span::symbol::sym;
|
||||
use rustc_span::Span;
|
||||
use rustc_target::spec::abi;
|
||||
|
||||
macro_rules! gate_feature_fn {
|
||||
($visitor: expr, $has_feature: expr, $span: expr, $name: expr, $explain: expr, $help: expr) => {{
|
||||
|
@ -84,210 +84,26 @@ impl<'a> PostExpansionVisitor<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
match symbol_unescaped.as_str() {
|
||||
// Stable
|
||||
"Rust" | "C" | "cdecl" | "stdcall" | "fastcall" | "aapcs" | "win64" | "sysv64"
|
||||
| "system" => {}
|
||||
"rust-intrinsic" => {
|
||||
gate_feature_post!(&self, intrinsics, span, "intrinsics are subject to change");
|
||||
}
|
||||
"platform-intrinsic" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
platform_intrinsics,
|
||||
match abi::is_enabled(&self.features, span, symbol_unescaped.as_str()) {
|
||||
Ok(()) => (),
|
||||
Err(abi::AbiDisabled::Unstable { feature, explain }) => {
|
||||
feature_err_issue(
|
||||
&self.sess.parse_sess,
|
||||
feature,
|
||||
span,
|
||||
"platform intrinsics are experimental and possibly buggy"
|
||||
);
|
||||
GateIssue::Language,
|
||||
explain,
|
||||
)
|
||||
.emit();
|
||||
}
|
||||
"vectorcall" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
abi_vectorcall,
|
||||
span,
|
||||
"vectorcall is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"thiscall" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
abi_thiscall,
|
||||
span,
|
||||
"thiscall is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"rust-call" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
unboxed_closures,
|
||||
span,
|
||||
"rust-call ABI is subject to change"
|
||||
);
|
||||
}
|
||||
"rust-cold" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
rust_cold_cc,
|
||||
span,
|
||||
"rust-cold is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"ptx-kernel" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
abi_ptx,
|
||||
span,
|
||||
"PTX ABIs are experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"unadjusted" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
abi_unadjusted,
|
||||
span,
|
||||
"unadjusted ABI is an implementation detail and perma-unstable"
|
||||
);
|
||||
}
|
||||
"msp430-interrupt" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
abi_msp430_interrupt,
|
||||
span,
|
||||
"msp430-interrupt ABI is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"x86-interrupt" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
abi_x86_interrupt,
|
||||
span,
|
||||
"x86-interrupt ABI is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"amdgpu-kernel" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
abi_amdgpu_kernel,
|
||||
span,
|
||||
"amdgpu-kernel ABI is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"avr-interrupt" | "avr-non-blocking-interrupt" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
abi_avr_interrupt,
|
||||
span,
|
||||
"avr-interrupt and avr-non-blocking-interrupt ABIs are experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"efiapi" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
abi_efiapi,
|
||||
span,
|
||||
"efiapi ABI is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"C-cmse-nonsecure-call" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
abi_c_cmse_nonsecure_call,
|
||||
span,
|
||||
"C-cmse-nonsecure-call ABI is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"C-unwind" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
c_unwind,
|
||||
span,
|
||||
"C-unwind ABI is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"stdcall-unwind" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
c_unwind,
|
||||
span,
|
||||
"stdcall-unwind ABI is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"system-unwind" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
c_unwind,
|
||||
span,
|
||||
"system-unwind ABI is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"thiscall-unwind" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
c_unwind,
|
||||
span,
|
||||
"thiscall-unwind ABI is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"cdecl-unwind" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
c_unwind,
|
||||
span,
|
||||
"cdecl-unwind ABI is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"fastcall-unwind" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
c_unwind,
|
||||
span,
|
||||
"fastcall-unwind ABI is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"vectorcall-unwind" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
c_unwind,
|
||||
span,
|
||||
"vectorcall-unwind ABI is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"aapcs-unwind" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
c_unwind,
|
||||
span,
|
||||
"aapcs-unwind ABI is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"win64-unwind" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
c_unwind,
|
||||
span,
|
||||
"win64-unwind ABI is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"sysv64-unwind" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
c_unwind,
|
||||
span,
|
||||
"sysv64-unwind ABI is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"wasm" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
wasm_abi,
|
||||
span,
|
||||
"wasm ABI is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
abi => {
|
||||
Err(abi::AbiDisabled::Unrecognized) => {
|
||||
if self.sess.opts.pretty.map_or(true, |ppm| ppm.needs_hir()) {
|
||||
self.sess.parse_sess.span_diagnostic.delay_span_bug(
|
||||
span,
|
||||
&format!("unrecognized ABI not caught in lowering: {}", abi),
|
||||
&format!(
|
||||
"unrecognized ABI not caught in lowering: {}",
|
||||
symbol_unescaped.as_str()
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -645,7 +461,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
|
|||
if let PatKind::Range(Some(_), None, Spanned { .. }) = inner_pat.kind {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
half_open_range_patterns,
|
||||
half_open_range_patterns_in_slices,
|
||||
pat.span,
|
||||
"`X..` patterns in slices are experimental"
|
||||
);
|
||||
|
@ -701,7 +517,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
|
|||
fn visit_assoc_item(&mut self, i: &'a ast::AssocItem, ctxt: AssocCtxt) {
|
||||
let is_fn = match i.kind {
|
||||
ast::AssocItemKind::Fn(_) => true,
|
||||
ast::AssocItemKind::TyAlias(box ast::TyAlias { ref ty, .. }) => {
|
||||
ast::AssocItemKind::Type(box ast::TyAlias { ref ty, .. }) => {
|
||||
if let (Some(_), AssocCtxt::Trait) = (ty, ctxt) {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
|
@ -773,7 +589,10 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session) {
|
|||
gate_all!(generators, "yield syntax is experimental");
|
||||
gate_all!(raw_ref_op, "raw address of syntax is experimental");
|
||||
gate_all!(const_trait_impl, "const trait impls are experimental");
|
||||
gate_all!(half_open_range_patterns, "half-open range patterns are unstable");
|
||||
gate_all!(
|
||||
half_open_range_patterns_in_slices,
|
||||
"half-open range patterns in slices are unstable"
|
||||
);
|
||||
gate_all!(inline_const, "inline-const is experimental");
|
||||
gate_all!(inline_const_pat, "inline-const in pattern position is experimental");
|
||||
gate_all!(associated_const_equality, "associated const equality is incomplete");
|
||||
|
|
|
@ -9,7 +9,6 @@
|
|||
#![feature(if_let_guard)]
|
||||
#![feature(iter_is_partitioned)]
|
||||
#![feature(let_chains)]
|
||||
#![cfg_attr(bootstrap, feature(let_else))]
|
||||
#![recursion_limit = "256"]
|
||||
|
||||
#[macro_use]
|
||||
|
|
|
@ -4,7 +4,6 @@ version = "0.0.0"
|
|||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
rustc_span = { path = "../rustc_span" }
|
||||
|
|
|
@ -516,7 +516,7 @@ impl<'a> State<'a> {
|
|||
ast::AssocItemKind::Const(def, ty, body) => {
|
||||
self.print_item_const(ident, None, ty, body.as_deref(), vis, *def);
|
||||
}
|
||||
ast::AssocItemKind::TyAlias(box ast::TyAlias {
|
||||
ast::AssocItemKind::Type(box ast::TyAlias {
|
||||
defaultness,
|
||||
generics,
|
||||
where_clauses,
|
||||
|
|
|
@ -4,7 +4,6 @@ version = "0.0.0"
|
|||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
rustc_ast_pretty = { path = "../rustc_ast_pretty" }
|
||||
|
|
|
@@ -5,7 +5,6 @@
//! to this crate.

#![feature(let_chains)]
#![cfg_attr(bootstrap, feature(let_else))]
#![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)]

@@ -4,7 +4,6 @@ version = "0.0.0"
edition = "2021"

[lib]
doctest = false

[dependencies]
either = "1.5.0"

@@ -14,8 +14,8 @@ use crate::{
places_conflict, region_infer::values::LivenessValues,
};

pub(super) fn generate_constraints<'cx, 'tcx>(
infcx: &InferCtxt<'cx, 'tcx>,
pub(super) fn generate_constraints<'tcx>(
infcx: &InferCtxt<'tcx>,
liveness_constraints: &mut LivenessValues<RegionVid>,
all_facts: &mut Option<AllFacts>,
location_table: &LocationTable,
@@ -37,8 +37,8 @@ pub(super) fn generate_constraints<'cx, 'tcx>(
}

/// 'cg = the duration of the constraint generation process itself.
struct ConstraintGeneration<'cg, 'cx, 'tcx> {
infcx: &'cg InferCtxt<'cx, 'tcx>,
struct ConstraintGeneration<'cg, 'tcx> {
infcx: &'cg InferCtxt<'tcx>,
all_facts: &'cg mut Option<AllFacts>,
location_table: &'cg LocationTable,
liveness_constraints: &'cg mut LivenessValues<RegionVid>,
@@ -46,7 +46,7 @@ struct ConstraintGeneration<'cg, 'cx, 'tcx> {
body: &'cg Body<'tcx>,
}

impl<'cg, 'cx, 'tcx> Visitor<'tcx> for ConstraintGeneration<'cg, 'cx, 'tcx> {
impl<'cg, 'tcx> Visitor<'tcx> for ConstraintGeneration<'cg, 'tcx> {
fn visit_basic_block_data(&mut self, bb: BasicBlock, data: &BasicBlockData<'tcx>) {
self.super_basic_block_data(bb, data);
}
@@ -156,7 +156,7 @@ impl<'cg, 'cx, 'tcx> Visitor<'tcx> for ConstraintGeneration<'cg, 'cx, 'tcx> {
}
}

impl<'cx, 'cg, 'tcx> ConstraintGeneration<'cx, 'cg, 'tcx> {
impl<'cx, 'tcx> ConstraintGeneration<'cx, 'tcx> {
/// Some variable with type `live_ty` is "regular live" at
/// `location` -- i.e., it may be used later. This means that all
/// regions appearing in the type `live_ty` must be live at

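The signature changes in this file (and in the `InferCtxt` users further down) all drop a lifetime parameter that no longer carries information. A small stand-alone illustration of that simplification, with generic names in place of the rustc types (these are assumptions for illustration, not compiler code):

```rust
// Illustrative only: `Cx` and `Gen` are generic stand-ins for the types in the
// hunks above, which drop a lifetime parameter that only ever mirrored `'tcx`.
struct Cx<'tcx> {
    data: &'tcx str,
}

// Before the change the visitor needed three lifetimes, e.g.
//     struct Gen<'cg, 'cx, 'tcx> { cx: &'cg Cx<'cx, 'tcx> }
// Afterwards one borrow lifetime plus the context lifetime is enough.
struct Gen<'cg, 'tcx> {
    cx: &'cg Cx<'tcx>,
}

impl<'cg, 'tcx> Gen<'cg, 'tcx> {
    // Returning `&'tcx str` (not `&'cg str`) is the payoff: data borrowed from
    // the context outlives the visitor itself.
    fn data(&self) -> &'tcx str {
        self.cx.data
    }
}

fn main() {
    let owned = String::from("borrowck");
    let cx = Cx { data: &owned };
    let d = {
        let g = Gen { cx: &cx };
        g.data()
    };
    assert_eq!(d, "borrowck");
}
```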
@@ -92,7 +92,7 @@ pub struct OutlivesConstraint<'tcx> {
pub span: Span,

/// What caused this constraint?
pub category: ConstraintCategory<'tcx>,
pub category: ConstraintCategory,

/// Variance diagnostic information
pub variance_info: VarianceDiagInfo<'tcx>,

@@ -31,9 +31,8 @@ pub fn get_body_with_borrowck_facts<'tcx>(
def: ty::WithOptConstParam<LocalDefId>,
) -> BodyWithBorrowckFacts<'tcx> {
let (input_body, promoted) = tcx.mir_promoted(def);
tcx.infer_ctxt().with_opaque_type_inference(DefiningAnchor::Bind(def.did)).enter(|infcx| {
let input_body: &Body<'_> = &input_body.borrow();
let promoted: &IndexVec<_, _> = &promoted.borrow();
*super::do_mir_borrowck(&infcx, input_body, promoted, true).1.unwrap()
})
let infcx = tcx.infer_ctxt().with_opaque_type_inference(DefiningAnchor::Bind(def.did)).build();
let input_body: &Body<'_> = &input_body.borrow();
let promoted: &IndexVec<_, _> = &promoted.borrow();
*super::do_mir_borrowck(&infcx, input_body, promoted, true).1.unwrap()
}

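The borrowck hunks above and below repeat one mechanical migration: the closure-scoped `infer_ctxt().enter(|infcx| ...)` call gives way to `infer_ctxt().build()`, which hands the context back to the caller. A minimal, self-contained sketch of that API shape follows; `Ctxt` and `CtxtBuilder` are hypothetical stand-ins, not the real compiler types.

```rust
// Illustrative only: hypothetical stand-ins for the builder pattern shown in
// the diff, not real compiler types.
struct Ctxt {
    opaque_type_inference: bool,
}

struct CtxtBuilder {
    opaque_type_inference: bool,
}

impl CtxtBuilder {
    fn new() -> Self {
        CtxtBuilder { opaque_type_inference: false }
    }

    fn with_opaque_type_inference(mut self, enabled: bool) -> Self {
        self.opaque_type_inference = enabled;
        self
    }

    // Old shape: the context only exists inside the closure.
    fn enter<R>(self, f: impl FnOnce(&Ctxt) -> R) -> R {
        let ctxt = Ctxt { opaque_type_inference: self.opaque_type_inference };
        f(&ctxt)
    }

    // New shape: the caller owns the context and controls its lifetime.
    fn build(self) -> Ctxt {
        Ctxt { opaque_type_inference: self.opaque_type_inference }
    }
}

fn main() {
    // Before: any result has to be produced (or cloned out) inside the closure.
    let inside = CtxtBuilder::new()
        .with_opaque_type_inference(true)
        .enter(|ctxt| ctxt.opaque_type_inference);

    // After: the context is an ordinary value, so borrows of it can span the
    // rest of the function instead of a single closure body.
    let ctxt = CtxtBuilder::new().with_opaque_type_inference(true).build();
    assert_eq!(inside, ctxt.opaque_type_inference);
}
```

Owning the context as a plain value appears to be what lets the later hunks hoist the `do_mir_borrowck` calls out of nested closures.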
@@ -56,7 +56,7 @@ impl<'tcx> UniverseInfo<'tcx> {
) {
match self.0 {
UniverseInfoInner::RelateTys { expected, found } => {
let err = mbcx.infcx.report_mismatched_types(
let err = mbcx.infcx.err_ctxt().report_mismatched_types(
&cause,
expected,
found,
@@ -238,20 +238,11 @@ impl<'tcx> TypeOpInfo<'tcx> for PredicateQuery<'tcx> {
placeholder_region: ty::Region<'tcx>,
error_region: Option<ty::Region<'tcx>>,
) -> Option<DiagnosticBuilder<'tcx, ErrorGuaranteed>> {
mbcx.infcx.tcx.infer_ctxt().enter_with_canonical(
cause.span,
&self.canonical_query,
|ref infcx, key, _| {
let mut fulfill_cx = <dyn TraitEngine<'_>>::new(infcx.tcx);
type_op_prove_predicate_with_cause(infcx, &mut *fulfill_cx, key, cause);
try_extract_error_from_fulfill_cx(
fulfill_cx,
infcx,
placeholder_region,
error_region,
)
},
)
let (ref infcx, key, _) =
mbcx.infcx.tcx.infer_ctxt().build_with_canonical(cause.span, &self.canonical_query);
let mut fulfill_cx = <dyn TraitEngine<'_>>::new(infcx.tcx);
type_op_prove_predicate_with_cause(infcx, &mut *fulfill_cx, key, cause);
try_extract_error_from_fulfill_cx(fulfill_cx, infcx, placeholder_region, error_region)
}
}

@@ -288,37 +279,24 @@ where
|
|||
placeholder_region: ty::Region<'tcx>,
|
||||
error_region: Option<ty::Region<'tcx>>,
|
||||
) -> Option<DiagnosticBuilder<'tcx, ErrorGuaranteed>> {
|
||||
mbcx.infcx.tcx.infer_ctxt().enter_with_canonical(
|
||||
cause.span,
|
||||
&self.canonical_query,
|
||||
|ref infcx, key, _| {
|
||||
let mut fulfill_cx = <dyn TraitEngine<'_>>::new(infcx.tcx);
|
||||
let (ref infcx, key, _) =
|
||||
mbcx.infcx.tcx.infer_ctxt().build_with_canonical(cause.span, &self.canonical_query);
|
||||
let mut fulfill_cx = <dyn TraitEngine<'_>>::new(infcx.tcx);
|
||||
|
||||
let mut selcx = SelectionContext::new(infcx);
|
||||
let mut selcx = SelectionContext::new(infcx);
|
||||
|
||||
// FIXME(lqd): Unify and de-duplicate the following with the actual
|
||||
// `rustc_traits::type_op::type_op_normalize` query to allow the span we need in the
|
||||
// `ObligationCause`. The normalization results are currently different between
|
||||
// `AtExt::normalize` used in the query and `normalize` called below: the former fails
|
||||
// to normalize the `nll/relate_tys/impl-fn-ignore-binder-via-bottom.rs` test. Check
|
||||
// after #85499 lands to see if its fixes have erased this difference.
|
||||
let (param_env, value) = key.into_parts();
|
||||
let Normalized { value: _, obligations } = rustc_trait_selection::traits::normalize(
|
||||
&mut selcx,
|
||||
param_env,
|
||||
cause,
|
||||
value.value,
|
||||
);
|
||||
fulfill_cx.register_predicate_obligations(infcx, obligations);
|
||||
// FIXME(lqd): Unify and de-duplicate the following with the actual
|
||||
// `rustc_traits::type_op::type_op_normalize` query to allow the span we need in the
|
||||
// `ObligationCause`. The normalization results are currently different between
|
||||
// `AtExt::normalize` used in the query and `normalize` called below: the former fails
|
||||
// to normalize the `nll/relate_tys/impl-fn-ignore-binder-via-bottom.rs` test. Check
|
||||
// after #85499 lands to see if its fixes have erased this difference.
|
||||
let (param_env, value) = key.into_parts();
|
||||
let Normalized { value: _, obligations } =
|
||||
rustc_trait_selection::traits::normalize(&mut selcx, param_env, cause, value.value);
|
||||
fulfill_cx.register_predicate_obligations(infcx, obligations);
|
||||
|
||||
try_extract_error_from_fulfill_cx(
|
||||
fulfill_cx,
|
||||
infcx,
|
||||
placeholder_region,
|
||||
error_region,
|
||||
)
|
||||
},
|
||||
)
|
||||
try_extract_error_from_fulfill_cx(fulfill_cx, infcx, placeholder_region, error_region)
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -349,21 +327,11 @@ impl<'tcx> TypeOpInfo<'tcx> for AscribeUserTypeQuery<'tcx> {
|
|||
placeholder_region: ty::Region<'tcx>,
|
||||
error_region: Option<ty::Region<'tcx>>,
|
||||
) -> Option<DiagnosticBuilder<'tcx, ErrorGuaranteed>> {
|
||||
mbcx.infcx.tcx.infer_ctxt().enter_with_canonical(
|
||||
cause.span,
|
||||
&self.canonical_query,
|
||||
|ref infcx, key, _| {
|
||||
let mut fulfill_cx = <dyn TraitEngine<'_>>::new(infcx.tcx);
|
||||
type_op_ascribe_user_type_with_span(infcx, &mut *fulfill_cx, key, Some(cause.span))
|
||||
.ok()?;
|
||||
try_extract_error_from_fulfill_cx(
|
||||
fulfill_cx,
|
||||
infcx,
|
||||
placeholder_region,
|
||||
error_region,
|
||||
)
|
||||
},
|
||||
)
|
||||
let (ref infcx, key, _) =
|
||||
mbcx.infcx.tcx.infer_ctxt().build_with_canonical(cause.span, &self.canonical_query);
|
||||
let mut fulfill_cx = <dyn TraitEngine<'_>>::new(infcx.tcx);
|
||||
type_op_ascribe_user_type_with_span(infcx, &mut *fulfill_cx, key, Some(cause.span)).ok()?;
|
||||
try_extract_error_from_fulfill_cx(fulfill_cx, infcx, placeholder_region, error_region)
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -407,7 +375,7 @@ impl<'tcx> TypeOpInfo<'tcx> for crate::type_check::InstantiateOpaqueType<'tcx> {
|
|||
#[instrument(skip(fulfill_cx, infcx), level = "debug")]
|
||||
fn try_extract_error_from_fulfill_cx<'tcx>(
|
||||
mut fulfill_cx: Box<dyn TraitEngine<'tcx> + 'tcx>,
|
||||
infcx: &InferCtxt<'_, 'tcx>,
|
||||
infcx: &InferCtxt<'tcx>,
|
||||
placeholder_region: ty::Region<'tcx>,
|
||||
error_region: Option<ty::Region<'tcx>>,
|
||||
) -> Option<DiagnosticBuilder<'tcx, ErrorGuaranteed>> {
|
||||
|
@@ -427,7 +395,7 @@ fn try_extract_error_from_fulfill_cx<'tcx>(
|
|||
}
|
||||
|
||||
fn try_extract_error_from_region_constraints<'tcx>(
|
||||
infcx: &InferCtxt<'_, 'tcx>,
|
||||
infcx: &InferCtxt<'tcx>,
|
||||
placeholder_region: ty::Region<'tcx>,
|
||||
error_region: Option<ty::Region<'tcx>>,
|
||||
region_constraints: &RegionConstraintData<'tcx>,
|
||||
|
@@ -449,42 +417,38 @@ fn try_extract_error_from_region_constraints<'tcx>(
|
|||
})?;
|
||||
|
||||
debug!(?sub_region, "cause = {:#?}", cause);
|
||||
let nice_error = match (error_region, *sub_region) {
|
||||
(Some(error_region), ty::ReVar(vid)) => NiceRegionError::new(
|
||||
infcx,
|
||||
RegionResolutionError::SubSupConflict(
|
||||
vid,
|
||||
region_var_origin(vid),
|
||||
cause.clone(),
|
||||
error_region,
|
||||
cause.clone(),
|
||||
placeholder_region,
|
||||
vec![],
|
||||
),
|
||||
),
|
||||
(Some(error_region), _) => NiceRegionError::new(
|
||||
infcx,
|
||||
RegionResolutionError::ConcreteFailure(cause.clone(), error_region, placeholder_region),
|
||||
let error = match (error_region, *sub_region) {
|
||||
(Some(error_region), ty::ReVar(vid)) => RegionResolutionError::SubSupConflict(
|
||||
vid,
|
||||
region_var_origin(vid),
|
||||
cause.clone(),
|
||||
error_region,
|
||||
cause.clone(),
|
||||
placeholder_region,
|
||||
vec![],
|
||||
),
|
||||
(Some(error_region), _) => {
|
||||
RegionResolutionError::ConcreteFailure(cause.clone(), error_region, placeholder_region)
|
||||
}
|
||||
// Note universe here is wrong...
|
||||
(None, ty::ReVar(vid)) => NiceRegionError::new(
|
||||
infcx,
|
||||
RegionResolutionError::UpperBoundUniverseConflict(
|
||||
vid,
|
||||
region_var_origin(vid),
|
||||
universe_of_region(vid),
|
||||
cause.clone(),
|
||||
placeholder_region,
|
||||
),
|
||||
),
|
||||
(None, _) => NiceRegionError::new(
|
||||
infcx,
|
||||
RegionResolutionError::ConcreteFailure(cause.clone(), sub_region, placeholder_region),
|
||||
(None, ty::ReVar(vid)) => RegionResolutionError::UpperBoundUniverseConflict(
|
||||
vid,
|
||||
region_var_origin(vid),
|
||||
universe_of_region(vid),
|
||||
cause.clone(),
|
||||
placeholder_region,
|
||||
),
|
||||
(None, _) => {
|
||||
RegionResolutionError::ConcreteFailure(cause.clone(), sub_region, placeholder_region)
|
||||
}
|
||||
};
|
||||
nice_error.try_report_from_nll().or_else(|| {
|
||||
NiceRegionError::new(&infcx.err_ctxt(), error).try_report_from_nll().or_else(|| {
|
||||
if let SubregionOrigin::Subtype(trace) = cause {
|
||||
Some(infcx.report_and_explain_type_error(*trace, TypeError::RegionsPlaceholderMismatch))
|
||||
Some(
|
||||
infcx
|
||||
.err_ctxt()
|
||||
.report_and_explain_type_error(*trace, TypeError::RegionsPlaceholderMismatch),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
@@ -198,7 +198,6 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||
move_span,
|
||||
move_spans,
|
||||
*moved_place,
|
||||
Some(used_place),
|
||||
partially_str,
|
||||
loop_message,
|
||||
move_msg,
|
||||
|
@ -369,6 +368,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||
let mut visitor = ConditionVisitor { spans: &spans, name: &name, errors: vec![] };
|
||||
visitor.visit_body(&body);
|
||||
|
||||
let mut show_assign_sugg = false;
|
||||
let isnt_initialized = if let InitializationRequiringAction::PartialAssignment
|
||||
| InitializationRequiringAction::Assignment = desired_action
|
||||
{
|
||||
|
@ -396,6 +396,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||
.count()
|
||||
== 0
|
||||
{
|
||||
show_assign_sugg = true;
|
||||
"isn't initialized"
|
||||
} else {
|
||||
"is possibly-uninitialized"
|
||||
|
@@ -446,10 +447,84 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
err.span_label(decl_span, "binding declared here but left uninitialized");
|
||||
if show_assign_sugg {
|
||||
struct LetVisitor {
|
||||
decl_span: Span,
|
||||
sugg_span: Option<Span>,
|
||||
}
|
||||
|
||||
impl<'v> Visitor<'v> for LetVisitor {
|
||||
fn visit_stmt(&mut self, ex: &'v hir::Stmt<'v>) {
|
||||
if self.sugg_span.is_some() {
|
||||
return;
|
||||
}
|
||||
if let hir::StmtKind::Local(hir::Local {
|
||||
span, ty, init: None, ..
|
||||
}) = &ex.kind && span.contains(self.decl_span) {
|
||||
self.sugg_span = ty.map_or(Some(self.decl_span), |ty| Some(ty.span));
|
||||
}
|
||||
hir::intravisit::walk_stmt(self, ex);
|
||||
}
|
||||
}
|
||||
|
||||
let mut visitor = LetVisitor { decl_span, sugg_span: None };
|
||||
visitor.visit_body(&body);
|
||||
if let Some(span) = visitor.sugg_span {
|
||||
self.suggest_assign_value(&mut err, moved_place, span);
|
||||
}
|
||||
}
|
||||
err
|
||||
}
|
||||
|
||||
fn suggest_assign_value(
|
||||
&self,
|
||||
err: &mut Diagnostic,
|
||||
moved_place: PlaceRef<'tcx>,
|
||||
sugg_span: Span,
|
||||
) {
|
||||
let ty = moved_place.ty(self.body, self.infcx.tcx).ty;
|
||||
debug!("ty: {:?}, kind: {:?}", ty, ty.kind());
|
||||
|
||||
let tcx = self.infcx.tcx;
|
||||
let implements_default = |ty, param_env| {
|
||||
let Some(default_trait) = tcx.get_diagnostic_item(sym::Default) else {
|
||||
return false;
|
||||
};
|
||||
// Regions are already solved, so we must use a fresh InferCtxt,
|
||||
// but the type has region variables, so erase those.
|
||||
tcx.infer_ctxt()
|
||||
.build()
|
||||
.type_implements_trait(
|
||||
default_trait,
|
||||
tcx.erase_regions(ty),
|
||||
ty::List::empty(),
|
||||
param_env,
|
||||
)
|
||||
.must_apply_modulo_regions()
|
||||
};
|
||||
|
||||
let assign_value = match ty.kind() {
|
||||
ty::Bool => "false",
|
||||
ty::Float(_) => "0.0",
|
||||
ty::Int(_) | ty::Uint(_) => "0",
|
||||
ty::Never | ty::Error(_) => "",
|
||||
ty::Adt(def, _) if Some(def.did()) == tcx.get_diagnostic_item(sym::Vec) => "vec![]",
|
||||
ty::Adt(_, _) if implements_default(ty, self.param_env) => "Default::default()",
|
||||
_ => "todo!()",
|
||||
};
|
||||
|
||||
if !assign_value.is_empty() {
|
||||
err.span_suggestion_verbose(
|
||||
sugg_span.shrink_to_hi(),
|
||||
format!("consider assigning a value"),
|
||||
format!(" = {}", assign_value),
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn suggest_borrow_fn_like(
|
||||
&self,
|
||||
err: &mut Diagnostic,
|
||||
|
@ -537,41 +612,40 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||
.and_then(|def_id| tcx.hir().get_generics(def_id))
|
||||
else { return; };
|
||||
// Try to find predicates on *generic params* that would allow copying `ty`
|
||||
let predicates: Result<Vec<_>, _> = tcx.infer_ctxt().enter(|infcx| {
|
||||
let mut fulfill_cx = <dyn rustc_infer::traits::TraitEngine<'_>>::new(infcx.tcx);
|
||||
let infcx = tcx.infer_ctxt().build();
|
||||
let mut fulfill_cx = <dyn rustc_infer::traits::TraitEngine<'_>>::new(infcx.tcx);
|
||||
|
||||
let copy_did = infcx.tcx.lang_items().copy_trait().unwrap();
|
||||
let cause = ObligationCause::new(
|
||||
span,
|
||||
self.mir_hir_id(),
|
||||
rustc_infer::traits::ObligationCauseCode::MiscObligation,
|
||||
);
|
||||
fulfill_cx.register_bound(
|
||||
&infcx,
|
||||
self.param_env,
|
||||
// Erase any region vids from the type, which may not be resolved
|
||||
infcx.tcx.erase_regions(ty),
|
||||
copy_did,
|
||||
cause,
|
||||
);
|
||||
// Select all, including ambiguous predicates
|
||||
let errors = fulfill_cx.select_all_or_error(&infcx);
|
||||
let copy_did = infcx.tcx.lang_items().copy_trait().unwrap();
|
||||
let cause = ObligationCause::new(
|
||||
span,
|
||||
self.mir_hir_id(),
|
||||
rustc_infer::traits::ObligationCauseCode::MiscObligation,
|
||||
);
|
||||
fulfill_cx.register_bound(
|
||||
&infcx,
|
||||
self.param_env,
|
||||
// Erase any region vids from the type, which may not be resolved
|
||||
infcx.tcx.erase_regions(ty),
|
||||
copy_did,
|
||||
cause,
|
||||
);
|
||||
// Select all, including ambiguous predicates
|
||||
let errors = fulfill_cx.select_all_or_error(&infcx);
|
||||
|
||||
// Only emit suggestion if all required predicates are on generic
|
||||
errors
|
||||
.into_iter()
|
||||
.map(|err| match err.obligation.predicate.kind().skip_binder() {
|
||||
PredicateKind::Trait(predicate) => match predicate.self_ty().kind() {
|
||||
ty::Param(param_ty) => Ok((
|
||||
generics.type_param(param_ty, tcx),
|
||||
predicate.trait_ref.print_only_trait_path().to_string(),
|
||||
)),
|
||||
_ => Err(()),
|
||||
},
|
||||
// Only emit suggestion if all required predicates are on generic
|
||||
let predicates: Result<Vec<_>, _> = errors
|
||||
.into_iter()
|
||||
.map(|err| match err.obligation.predicate.kind().skip_binder() {
|
||||
PredicateKind::Trait(predicate) => match predicate.self_ty().kind() {
|
||||
ty::Param(param_ty) => Ok((
|
||||
generics.type_param(param_ty, tcx),
|
||||
predicate.trait_ref.print_only_trait_path().to_string(),
|
||||
)),
|
||||
_ => Err(()),
|
||||
})
|
||||
.collect()
|
||||
});
|
||||
},
|
||||
_ => Err(()),
|
||||
})
|
||||
.collect();
|
||||
|
||||
if let Ok(predicates) = predicates {
|
||||
suggest_constraining_type_params(
|
||||
|
@@ -909,7 +983,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||
err: &mut Diagnostic,
|
||||
location: Location,
|
||||
issued_borrow: &BorrowData<'tcx>,
|
||||
explanation: BorrowExplanation<'tcx>,
|
||||
explanation: BorrowExplanation,
|
||||
) {
|
||||
let used_in_call = matches!(
|
||||
explanation,
|
||||
|
@ -1259,7 +1333,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||
borrow: &BorrowData<'tcx>,
|
||||
drop_span: Span,
|
||||
borrow_spans: UseSpans<'tcx>,
|
||||
explanation: BorrowExplanation<'tcx>,
|
||||
explanation: BorrowExplanation,
|
||||
) -> DiagnosticBuilder<'cx, ErrorGuaranteed> {
|
||||
debug!(
|
||||
"report_local_value_does_not_live_long_enough(\
|
||||
|
@ -1465,7 +1539,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||
drop_span: Span,
|
||||
borrow_spans: UseSpans<'tcx>,
|
||||
proper_span: Span,
|
||||
explanation: BorrowExplanation<'tcx>,
|
||||
explanation: BorrowExplanation,
|
||||
) -> DiagnosticBuilder<'cx, ErrorGuaranteed> {
|
||||
if let BorrowExplanation::MustBeValidFor { category, span, from_closure: false, .. } =
|
||||
explanation
|
||||
|
@ -1579,7 +1653,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||
borrow: &BorrowData<'tcx>,
|
||||
borrow_span: Span,
|
||||
return_span: Span,
|
||||
category: ConstraintCategory<'tcx>,
|
||||
category: ConstraintCategory,
|
||||
opt_place_desc: Option<&String>,
|
||||
) -> Option<DiagnosticBuilder<'cx, ErrorGuaranteed>> {
|
||||
let return_kind = match category {
|
||||
|
@ -1674,7 +1748,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||
use_span: UseSpans<'tcx>,
|
||||
var_span: Span,
|
||||
fr_name: &RegionName,
|
||||
category: ConstraintCategory<'tcx>,
|
||||
category: ConstraintCategory,
|
||||
constraint_span: Span,
|
||||
captured_var: &str,
|
||||
) -> DiagnosticBuilder<'cx, ErrorGuaranteed> {
|
||||
@@ -1,8 +1,5 @@
|
|||
//! Print diagnostics to explain why values are borrowed.
|
||||
|
||||
use std::collections::VecDeque;
|
||||
|
||||
use rustc_data_structures::fx::FxHashSet;
|
||||
use rustc_errors::{Applicability, Diagnostic};
|
||||
use rustc_index::vec::IndexVec;
|
||||
use rustc_infer::infer::NllRegionVariableOrigin;
|
||||
|
@ -24,7 +21,7 @@ use crate::{
|
|||
use super::{find_use, RegionName, UseSpans};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) enum BorrowExplanation<'tcx> {
|
||||
pub(crate) enum BorrowExplanation {
|
||||
UsedLater(LaterUseKind, Span, Option<Span>),
|
||||
UsedLaterInLoop(LaterUseKind, Span, Option<Span>),
|
||||
UsedLaterWhenDropped {
|
||||
|
@ -33,7 +30,7 @@ pub(crate) enum BorrowExplanation<'tcx> {
|
|||
should_note_order: bool,
|
||||
},
|
||||
MustBeValidFor {
|
||||
category: ConstraintCategory<'tcx>,
|
||||
category: ConstraintCategory,
|
||||
from_closure: bool,
|
||||
span: Span,
|
||||
region_name: RegionName,
|
||||
|
@ -52,7 +49,7 @@ pub(crate) enum LaterUseKind {
|
|||
Other,
|
||||
}
|
||||
|
||||
impl<'tcx> BorrowExplanation<'tcx> {
|
||||
impl<'tcx> BorrowExplanation {
|
||||
pub(crate) fn is_explained(&self) -> bool {
|
||||
!matches!(self, BorrowExplanation::Unexplained)
|
||||
}
|
||||
|
@ -287,7 +284,7 @@ impl<'tcx> BorrowExplanation<'tcx> {
|
|||
fn add_lifetime_bound_suggestion_to_diagnostic(
|
||||
&self,
|
||||
err: &mut Diagnostic,
|
||||
category: &ConstraintCategory<'tcx>,
|
||||
category: &ConstraintCategory,
|
||||
span: Span,
|
||||
region_name: &RegionName,
|
||||
) {
|
||||
|
@ -319,7 +316,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||
&self,
|
||||
borrow_region: RegionVid,
|
||||
outlived_region: RegionVid,
|
||||
) -> (ConstraintCategory<'tcx>, bool, Span, Option<RegionName>, Vec<ExtraConstraintInfo>) {
|
||||
) -> (ConstraintCategory, bool, Span, Option<RegionName>, Vec<ExtraConstraintInfo>) {
|
||||
let (blame_constraint, extra_info) = self.regioncx.best_blame_constraint(
|
||||
borrow_region,
|
||||
NllRegionVariableOrigin::FreeRegion,
|
||||
|
@ -351,7 +348,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||
location: Location,
|
||||
borrow: &BorrowData<'tcx>,
|
||||
kind_place: Option<(WriteKind, Place<'tcx>)>,
|
||||
) -> BorrowExplanation<'tcx> {
|
||||
) -> BorrowExplanation {
|
||||
let regioncx = &self.regioncx;
|
||||
let body: &Body<'_> = &self.body;
|
||||
let tcx = self.infcx.tcx;
|
||||
|
@@ -359,19 +356,37 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||
let borrow_region_vid = borrow.region;
|
||||
debug!(?borrow_region_vid);
|
||||
|
||||
let region_sub = self.regioncx.find_sub_region_live_at(borrow_region_vid, location);
|
||||
let mut region_sub = self.regioncx.find_sub_region_live_at(borrow_region_vid, location);
|
||||
debug!(?region_sub);
|
||||
|
||||
match find_use::find(body, regioncx, tcx, region_sub, location) {
|
||||
let mut use_location = location;
|
||||
let mut use_in_later_iteration_of_loop = false;
|
||||
|
||||
if region_sub == borrow_region_vid {
|
||||
// When `region_sub` is the same as `borrow_region_vid` (the location where the borrow is
|
||||
// issued is the same location that invalidates the reference), this is likely a loop iteration
|
||||
// - in this case, try using the loop terminator location in `find_sub_region_live_at`.
|
||||
if let Some(loop_terminator_location) =
|
||||
regioncx.find_loop_terminator_location(borrow.region, body)
|
||||
{
|
||||
region_sub = self
|
||||
.regioncx
|
||||
.find_sub_region_live_at(borrow_region_vid, loop_terminator_location);
|
||||
debug!("explain_why_borrow_contains_point: region_sub in loop={:?}", region_sub);
|
||||
use_location = loop_terminator_location;
|
||||
use_in_later_iteration_of_loop = true;
|
||||
}
|
||||
}
|
||||
|
||||
match find_use::find(body, regioncx, tcx, region_sub, use_location) {
|
||||
Some(Cause::LiveVar(local, location)) => {
|
||||
let span = body.source_info(location).span;
|
||||
let spans = self
|
||||
.move_spans(Place::from(local).as_ref(), location)
|
||||
.or_else(|| self.borrow_spans(span, location));
|
||||
|
||||
let borrow_location = location;
|
||||
if self.is_use_in_later_iteration_of_loop(borrow_location, location) {
|
||||
let later_use = self.later_use_kind(borrow, spans, location);
|
||||
if use_in_later_iteration_of_loop {
|
||||
let later_use = self.later_use_kind(borrow, spans, use_location);
|
||||
BorrowExplanation::UsedLaterInLoop(later_use.0, later_use.1, later_use.2)
|
||||
} else {
|
||||
// Check if the location represents a `FakeRead`, and adapt the error
|
||||
|
@@ -425,131 +440,6 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// true if `borrow_location` can reach `use_location` by going through a loop and
|
||||
/// `use_location` is also inside of that loop
|
||||
fn is_use_in_later_iteration_of_loop(
|
||||
&self,
|
||||
borrow_location: Location,
|
||||
use_location: Location,
|
||||
) -> bool {
|
||||
let back_edge = self.reach_through_backedge(borrow_location, use_location);
|
||||
back_edge.map_or(false, |back_edge| self.can_reach_head_of_loop(use_location, back_edge))
|
||||
}
|
||||
|
||||
/// Returns the outmost back edge if `from` location can reach `to` location passing through
|
||||
/// that back edge
|
||||
fn reach_through_backedge(&self, from: Location, to: Location) -> Option<Location> {
|
||||
let mut visited_locations = FxHashSet::default();
|
||||
let mut pending_locations = VecDeque::new();
|
||||
visited_locations.insert(from);
|
||||
pending_locations.push_back(from);
|
||||
debug!("reach_through_backedge: from={:?} to={:?}", from, to,);
|
||||
|
||||
let mut outmost_back_edge = None;
|
||||
while let Some(location) = pending_locations.pop_front() {
|
||||
debug!(
|
||||
"reach_through_backedge: location={:?} outmost_back_edge={:?}
|
||||
pending_locations={:?} visited_locations={:?}",
|
||||
location, outmost_back_edge, pending_locations, visited_locations
|
||||
);
|
||||
|
||||
if location == to && outmost_back_edge.is_some() {
|
||||
// We've managed to reach the use location
|
||||
debug!("reach_through_backedge: found!");
|
||||
return outmost_back_edge;
|
||||
}
|
||||
|
||||
let block = &self.body.basic_blocks[location.block];
|
||||
|
||||
if location.statement_index < block.statements.len() {
|
||||
let successor = location.successor_within_block();
|
||||
if visited_locations.insert(successor) {
|
||||
pending_locations.push_back(successor);
|
||||
}
|
||||
} else {
|
||||
pending_locations.extend(
|
||||
block
|
||||
.terminator()
|
||||
.successors()
|
||||
.map(|bb| Location { statement_index: 0, block: bb })
|
||||
.filter(|s| visited_locations.insert(*s))
|
||||
.map(|s| {
|
||||
if self.is_back_edge(location, s) {
|
||||
match outmost_back_edge {
|
||||
None => {
|
||||
outmost_back_edge = Some(location);
|
||||
}
|
||||
|
||||
Some(back_edge)
|
||||
if location.dominates(back_edge, &self.dominators) =>
|
||||
{
|
||||
outmost_back_edge = Some(location);
|
||||
}
|
||||
|
||||
Some(_) => {}
|
||||
}
|
||||
}
|
||||
|
||||
s
|
||||
}),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// true if `from` location can reach `loop_head` location and `loop_head` dominates all the
|
||||
/// intermediate nodes
|
||||
fn can_reach_head_of_loop(&self, from: Location, loop_head: Location) -> bool {
|
||||
self.find_loop_head_dfs(from, loop_head, &mut FxHashSet::default())
|
||||
}
|
||||
|
||||
fn find_loop_head_dfs(
|
||||
&self,
|
||||
from: Location,
|
||||
loop_head: Location,
|
||||
visited_locations: &mut FxHashSet<Location>,
|
||||
) -> bool {
|
||||
visited_locations.insert(from);
|
||||
|
||||
if from == loop_head {
|
||||
return true;
|
||||
}
|
||||
|
||||
if loop_head.dominates(from, &self.dominators) {
|
||||
let block = &self.body.basic_blocks[from.block];
|
||||
|
||||
if from.statement_index < block.statements.len() {
|
||||
let successor = from.successor_within_block();
|
||||
|
||||
if !visited_locations.contains(&successor)
|
||||
&& self.find_loop_head_dfs(successor, loop_head, visited_locations)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
} else {
|
||||
for bb in block.terminator().successors() {
|
||||
let successor = Location { statement_index: 0, block: bb };
|
||||
|
||||
if !visited_locations.contains(&successor)
|
||||
&& self.find_loop_head_dfs(successor, loop_head, visited_locations)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
/// True if an edge `source -> target` is a backedge -- in other words, if the target
|
||||
/// dominates the source.
|
||||
fn is_back_edge(&self, source: Location, target: Location) -> bool {
|
||||
target.dominates(source, &self.dominators)
|
||||
}
|
||||
|
||||
/// Determine how the borrow was later used.
|
||||
/// First span returned points to the location of the conflicting use
|
||||
/// Second span if `Some` is returned in the case of closures and points
|
||||
|
|
|
@@ -972,7 +972,6 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||
move_span: Span,
|
||||
move_spans: UseSpans<'tcx>,
|
||||
moved_place: Place<'tcx>,
|
||||
used_place: Option<PlaceRef<'tcx>>,
|
||||
partially_str: &str,
|
||||
loop_message: &str,
|
||||
move_msg: &str,
|
||||
|
@ -1026,7 +1025,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||
if let Some((CallDesugaringKind::ForLoopIntoIter, _)) = desugaring {
|
||||
let ty = moved_place.ty(self.body, self.infcx.tcx).ty;
|
||||
let suggest = match self.infcx.tcx.get_diagnostic_item(sym::IntoIterator) {
|
||||
Some(def_id) => self.infcx.tcx.infer_ctxt().enter(|infcx| {
|
||||
Some(def_id) => {
|
||||
let infcx = self.infcx.tcx.infer_ctxt().build();
|
||||
type_known_to_meet_bound_modulo_regions(
|
||||
&infcx,
|
||||
self.param_env,
|
||||
|
@ -1037,7 +1037,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||
def_id,
|
||||
DUMMY_SP,
|
||||
)
|
||||
}),
|
||||
}
|
||||
_ => false,
|
||||
};
|
||||
if suggest {
|
||||
|
@ -1060,9 +1060,11 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||
place_name, partially_str, loop_message
|
||||
),
|
||||
);
|
||||
// If we have a `&mut` ref, we need to reborrow.
|
||||
if let Some(ty::Ref(_, _, hir::Mutability::Mut)) = used_place
|
||||
.map(|used_place| used_place.ty(self.body, self.infcx.tcx).ty.kind())
|
||||
// If the moved place was a `&mut` ref, then we can
|
||||
// suggest to reborrow it where it was moved, so it
|
||||
// will still be valid by the time we get to the usage.
|
||||
if let ty::Ref(_, _, hir::Mutability::Mut) =
|
||||
moved_place.ty(self.body, self.infcx.tcx).ty.kind()
|
||||
{
|
||||
// If we are in a loop this will be suggested later.
|
||||
if !is_loop_move {
|
||||
|
|
|
@ -401,7 +401,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||
};
|
||||
if let Some(use_spans) = use_spans {
|
||||
self.explain_captures(
|
||||
&mut err, span, span, use_spans, move_place, None, "", "", "", false, true,
|
||||
&mut err, span, span, use_spans, move_place, "", "", "", false, true,
|
||||
);
|
||||
}
|
||||
err
|
||||
|
|
|
@ -932,7 +932,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||
let opt_suggestions = self
|
||||
.infcx
|
||||
.tcx
|
||||
.typeck(path_segment.hir_id.owner)
|
||||
.typeck(path_segment.hir_id.owner.def_id)
|
||||
.type_dependent_def_id(*hir_id)
|
||||
.and_then(|def_id| self.infcx.tcx.impl_of_method(def_id))
|
||||
.map(|def_id| self.infcx.tcx.associated_items(def_id))
|
||||
|
@ -1032,7 +1032,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||
if look_at_return && hir.get_return_block(closure_id).is_some() {
|
||||
// ...otherwise we are probably in the tail expression of the function, point at the
|
||||
// return type.
|
||||
match hir.get_by_def_id(hir.get_parent_item(fn_call_id)) {
|
||||
match hir.get_by_def_id(hir.get_parent_item(fn_call_id).def_id) {
|
||||
hir::Node::Item(hir::Item { ident, kind: hir::ItemKind::Fn(sig, ..), .. })
|
||||
| hir::Node::TraitItem(hir::TraitItem {
|
||||
ident,
|
||||
|
|
|
@ -161,7 +161,7 @@ impl OutlivesSuggestionBuilder {
|
|||
pub(crate) fn intermediate_suggestion(
|
||||
&mut self,
|
||||
mbcx: &MirBorrowckCtxt<'_, '_>,
|
||||
errci: &ErrorConstraintInfo<'_>,
|
||||
errci: &ErrorConstraintInfo,
|
||||
diag: &mut Diagnostic,
|
||||
) {
|
||||
// Emit an intermediate note.
|
||||
|
|
|
@@ -2,6 +2,7 @@
|
|||
#![deny(rustc::diagnostic_outside_of_impl)]
|
||||
//! Error reporting machinery for lifetime errors.
|
||||
|
||||
use either::Either;
|
||||
use rustc_data_structures::fx::FxHashSet;
|
||||
use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, MultiSpan};
|
||||
use rustc_hir::def_id::DefId;
|
||||
|
@ -16,7 +17,7 @@ use rustc_infer::infer::{
|
|||
NllRegionVariableOrigin, RelateParamBound,
|
||||
};
|
||||
use rustc_middle::hir::place::PlaceBase;
|
||||
use rustc_middle::mir::{ConstraintCategory, ReturnConstraint};
|
||||
use rustc_middle::mir::{ConstraintCategory, ReturnConstraint, TerminatorKind};
|
||||
use rustc_middle::ty::subst::InternalSubsts;
|
||||
use rustc_middle::ty::Region;
|
||||
use rustc_middle::ty::TypeVisitor;
|
||||
|
@ -39,7 +40,7 @@ use crate::{
|
|||
MirBorrowckCtxt,
|
||||
};
|
||||
|
||||
impl<'tcx> ConstraintDescription for ConstraintCategory<'tcx> {
|
||||
impl ConstraintDescription for ConstraintCategory {
|
||||
fn description(&self) -> &'static str {
|
||||
// Must end with a space. Allows for empty names to be provided.
|
||||
match self {
|
||||
|
@@ -115,7 +116,7 @@ pub(crate) enum RegionErrorKind<'tcx> {
|
|||
|
||||
/// Information about the various region constraints involved in a borrow checker error.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ErrorConstraintInfo<'tcx> {
|
||||
pub struct ErrorConstraintInfo {
|
||||
// fr: outlived_fr
|
||||
pub(super) fr: RegionVid,
|
||||
pub(super) fr_is_local: bool,
|
||||
|
@ -123,7 +124,7 @@ pub struct ErrorConstraintInfo<'tcx> {
|
|||
pub(super) outlived_fr_is_local: bool,
|
||||
|
||||
// Category and span for best blame constraint
|
||||
pub(super) category: ConstraintCategory<'tcx>,
|
||||
pub(super) category: ConstraintCategory,
|
||||
pub(super) span: Span,
|
||||
}
|
||||
|
||||
|
@ -186,7 +187,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||
if let Some(lower_bound_region) = lower_bound_region {
|
||||
let generic_ty = type_test.generic_kind.to_ty(self.infcx.tcx);
|
||||
let origin = RelateParamBound(type_test_span, generic_ty, None);
|
||||
self.buffer_error(self.infcx.construct_generic_bound_failure(
|
||||
self.buffer_error(self.infcx.err_ctxt().construct_generic_bound_failure(
|
||||
self.body.source.def_id().expect_local(),
|
||||
type_test_span,
|
||||
Some(origin),
|
||||
|
@ -281,7 +282,8 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||
let tcx = self.infcx.tcx;
|
||||
match tcx.hir().get_if_local(def_id) {
|
||||
Some(Node::ImplItem(impl_item)) => {
|
||||
match tcx.hir().find_by_def_id(tcx.hir().get_parent_item(impl_item.hir_id())) {
|
||||
match tcx.hir().find_by_def_id(tcx.hir().get_parent_item(impl_item.hir_id()).def_id)
|
||||
{
|
||||
Some(Node::Item(Item {
|
||||
kind: ItemKind::Impl(hir::Impl { self_ty, .. }),
|
||||
..
|
||||
|
@ -291,7 +293,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||
}
|
||||
Some(Node::TraitItem(trait_item)) => {
|
||||
let trait_did = tcx.hir().get_parent_item(trait_item.hir_id());
|
||||
match tcx.hir().find_by_def_id(trait_did) {
|
||||
match tcx.hir().find_by_def_id(trait_did.def_id) {
|
||||
Some(Node::Item(Item { kind: ItemKind::Trait(..), .. })) => {
|
||||
// The method being called is defined in the `trait`, but the `'static`
|
||||
// obligation comes from the `impl`. Find that `impl` so that we can point
|
||||
|
@ -340,7 +342,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||
/// Report an error because the universal region `fr` was required to outlive
|
||||
/// `outlived_fr` but it is not known to do so. For example:
|
||||
///
|
||||
/// ```compile_fail,E0312
|
||||
/// ```compile_fail
|
||||
/// fn foo<'a, 'b>(x: &'a u32) -> &'b u32 { x }
|
||||
/// ```
|
||||
///
|
||||
|
@ -364,7 +366,8 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||
|
||||
// Check if we can use one of the "nice region errors".
|
||||
if let (Some(f), Some(o)) = (self.to_error_region(fr), self.to_error_region(outlived_fr)) {
|
||||
let nice = NiceRegionError::new_from_span(self.infcx, cause.span, o, f);
|
||||
let infer_err = self.infcx.err_ctxt();
|
||||
let nice = NiceRegionError::new_from_span(&infer_err, cause.span, o, f);
|
||||
if let Some(diag) = nice.try_report_from_nll() {
|
||||
self.buffer_error(diag);
|
||||
return;
|
||||
|
@ -496,7 +499,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||
/// ```
|
||||
fn report_fnmut_error(
|
||||
&self,
|
||||
errci: &ErrorConstraintInfo<'tcx>,
|
||||
errci: &ErrorConstraintInfo,
|
||||
kind: ReturnConstraint,
|
||||
) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> {
|
||||
let ErrorConstraintInfo { outlived_fr, span, .. } = errci;
|
||||
|
@ -569,7 +572,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||
#[instrument(level = "debug", skip(self))]
|
||||
fn report_escaping_data_error(
|
||||
&self,
|
||||
errci: &ErrorConstraintInfo<'tcx>,
|
||||
errci: &ErrorConstraintInfo,
|
||||
) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> {
|
||||
let ErrorConstraintInfo { span, category, .. } = errci;
|
||||
|
||||
|
@ -673,7 +676,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||
/// ```
|
||||
fn report_general_error(
|
||||
&self,
|
||||
errci: &ErrorConstraintInfo<'tcx>,
|
||||
errci: &ErrorConstraintInfo,
|
||||
) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> {
|
||||
let ErrorConstraintInfo {
|
||||
fr,
|
||||
|
@ -786,7 +789,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||
diag: &mut Diagnostic,
|
||||
f: Region<'tcx>,
|
||||
o: Region<'tcx>,
|
||||
category: &ConstraintCategory<'tcx>,
|
||||
category: &ConstraintCategory,
|
||||
) {
|
||||
if !o.is_static() {
|
||||
return;
|
||||
|
@@ -794,7 +797,12 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
|||
|
||||
let tcx = self.infcx.tcx;
|
||||
|
||||
let instance = if let ConstraintCategory::CallArgument(Some(func_ty)) = category {
|
||||
let instance =
|
||||
if let ConstraintCategory::CallArgument(location) = category
|
||||
&& let Either::Right(term) = self.body.stmt_at(*location)
|
||||
&& let TerminatorKind::Call { func, .. } = &term.kind
|
||||
{
|
||||
let func_ty = func.ty(self.body, tcx);
|
||||
let (fn_did, substs) = match func_ty.kind() {
|
||||
ty::FnDef(fn_did, substs) => (fn_did, substs),
|
||||
_ => return,
|
||||
|
|
|
@ -707,7 +707,8 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
|
|||
hir::AsyncGeneratorKind::Block => " of async block",
|
||||
hir::AsyncGeneratorKind::Closure => " of async closure",
|
||||
hir::AsyncGeneratorKind::Fn => {
|
||||
let parent_item = hir.get_by_def_id(hir.get_parent_item(mir_hir_id));
|
||||
let parent_item =
|
||||
hir.get_by_def_id(hir.get_parent_item(mir_hir_id).def_id);
|
||||
let output = &parent_item
|
||||
.fn_decl()
|
||||
.expect("generator lowered from async fn should be in fn")
|
||||
|
@ -863,15 +864,13 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
|
|||
};
|
||||
|
||||
let tcx = self.infcx.tcx;
|
||||
let body_parent_did = tcx.opt_parent(self.mir_def_id().to_def_id())?;
|
||||
if tcx.parent(region.def_id) != body_parent_did
|
||||
|| tcx.def_kind(body_parent_did) != DefKind::Impl
|
||||
{
|
||||
let region_parent = tcx.parent(region.def_id);
|
||||
if tcx.def_kind(region_parent) != DefKind::Impl {
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut found = false;
|
||||
tcx.fold_regions(tcx.type_of(body_parent_did), |r: ty::Region<'tcx>, _| {
|
||||
tcx.fold_regions(tcx.type_of(region_parent), |r: ty::Region<'tcx>, _| {
|
||||
if *r == ty::ReEarlyBound(region) {
|
||||
found = true;
|
||||
}
|
||||
|
|
|
@ -3,7 +3,6 @@
|
|||
#![allow(rustc::potential_query_instability)]
|
||||
#![feature(box_patterns)]
|
||||
#![feature(let_chains)]
|
||||
#![cfg_attr(bootstrap, feature(let_else))]
|
||||
#![feature(min_specialization)]
|
||||
#![feature(never_type)]
|
||||
#![feature(rustc_attrs)]
|
||||
|
@@ -132,14 +131,11 @@ fn mir_borrowck<'tcx>(
|
|||
debug!("run query mir_borrowck: {}", tcx.def_path_str(def.did.to_def_id()));
|
||||
let hir_owner = tcx.hir().local_def_id_to_hir_id(def.did).owner;
|
||||
|
||||
let opt_closure_req = tcx
|
||||
.infer_ctxt()
|
||||
.with_opaque_type_inference(DefiningAnchor::Bind(hir_owner))
|
||||
.enter(|infcx| {
|
||||
let input_body: &Body<'_> = &input_body.borrow();
|
||||
let promoted: &IndexVec<_, _> = &promoted.borrow();
|
||||
do_mir_borrowck(&infcx, input_body, promoted, false).0
|
||||
});
|
||||
let infcx =
|
||||
tcx.infer_ctxt().with_opaque_type_inference(DefiningAnchor::Bind(hir_owner.def_id)).build();
|
||||
let input_body: &Body<'_> = &input_body.borrow();
|
||||
let promoted: &IndexVec<_, _> = &promoted.borrow();
|
||||
let opt_closure_req = do_mir_borrowck(&infcx, input_body, promoted, false).0;
|
||||
debug!("mir_borrowck done");
|
||||
|
||||
tcx.arena.alloc(opt_closure_req)
|
||||
|
@@ -151,8 +147,8 @@
|
|||
/// region ids on which the borrow checking was performed together with Polonius
|
||||
/// facts.
|
||||
#[instrument(skip(infcx, input_body, input_promoted), fields(id=?input_body.source.with_opt_param().as_local().unwrap()), level = "debug")]
|
||||
fn do_mir_borrowck<'a, 'tcx>(
|
||||
infcx: &InferCtxt<'a, 'tcx>,
|
||||
fn do_mir_borrowck<'tcx>(
|
||||
infcx: &InferCtxt<'tcx>,
|
||||
input_body: &Body<'tcx>,
|
||||
input_promoted: &IndexVec<Promoted, Body<'tcx>>,
|
||||
return_body_with_facts: bool,
|
||||
|
@ -475,7 +471,7 @@ pub struct BodyWithBorrowckFacts<'tcx> {
|
|||
}
|
||||
|
||||
struct MirBorrowckCtxt<'cx, 'tcx> {
|
||||
infcx: &'cx InferCtxt<'cx, 'tcx>,
|
||||
infcx: &'cx InferCtxt<'tcx>,
|
||||
param_env: ParamEnv<'tcx>,
|
||||
body: &'cx Body<'tcx>,
|
||||
move_data: &'cx MoveData<'tcx>,
|
||||
|
|
|
@ -55,8 +55,8 @@ pub(crate) struct NllOutput<'tcx> {
|
|||
/// regions (e.g., region parameters) declared on the function. That set will need to be given to
|
||||
/// `compute_regions`.
|
||||
#[instrument(skip(infcx, param_env, body, promoted), level = "debug")]
|
||||
pub(crate) fn replace_regions_in_mir<'cx, 'tcx>(
|
||||
infcx: &InferCtxt<'cx, 'tcx>,
|
||||
pub(crate) fn replace_regions_in_mir<'tcx>(
|
||||
infcx: &InferCtxt<'tcx>,
|
||||
param_env: ty::ParamEnv<'tcx>,
|
||||
body: &mut Body<'tcx>,
|
||||
promoted: &mut IndexVec<Promoted, Body<'tcx>>,
|
||||
|
@ -155,7 +155,7 @@ fn populate_polonius_move_facts(
|
|||
///
|
||||
/// This may result in errors being reported.
|
||||
pub(crate) fn compute_regions<'cx, 'tcx>(
|
||||
infcx: &InferCtxt<'cx, 'tcx>,
|
||||
infcx: &InferCtxt<'tcx>,
|
||||
universal_regions: UniversalRegions<'tcx>,
|
||||
body: &Body<'tcx>,
|
||||
promoted: &IndexVec<Promoted, Body<'tcx>>,
|
||||
|
@ -318,8 +318,8 @@ pub(crate) fn compute_regions<'cx, 'tcx>(
|
|||
}
|
||||
}
|
||||
|
||||
pub(super) fn dump_mir_results<'a, 'tcx>(
|
||||
infcx: &InferCtxt<'a, 'tcx>,
|
||||
pub(super) fn dump_mir_results<'tcx>(
|
||||
infcx: &InferCtxt<'tcx>,
|
||||
body: &Body<'tcx>,
|
||||
regioncx: &RegionInferenceContext<'tcx>,
|
||||
closure_region_requirements: &Option<ClosureRegionRequirements<'_>>,
|
||||
|
@ -368,8 +368,8 @@ pub(super) fn dump_mir_results<'a, 'tcx>(
|
|||
};
|
||||
}
|
||||
|
||||
pub(super) fn dump_annotation<'a, 'tcx>(
|
||||
infcx: &InferCtxt<'a, 'tcx>,
|
||||
pub(super) fn dump_annotation<'tcx>(
|
||||
infcx: &InferCtxt<'tcx>,
|
||||
body: &Body<'tcx>,
|
||||
regioncx: &RegionInferenceContext<'tcx>,
|
||||
closure_region_requirements: &Option<ClosureRegionRequirements<'_>>,
|
||||
|
|
|
@@ -15,7 +15,7 @@ use rustc_infer::infer::region_constraints::{GenericKind, VarInfos, VerifyBound,
|
|||
use rustc_infer::infer::{InferCtxt, NllRegionVariableOrigin, RegionVariableOrigin};
|
||||
use rustc_middle::mir::{
|
||||
Body, ClosureOutlivesRequirement, ClosureOutlivesSubject, ClosureRegionRequirements,
|
||||
ConstraintCategory, Local, Location, ReturnConstraint,
|
||||
ConstraintCategory, Local, Location, ReturnConstraint, TerminatorKind,
|
||||
};
|
||||
use rustc_middle::traits::ObligationCause;
|
||||
use rustc_middle::traits::ObligationCauseCode;
|
||||
|
@ -91,7 +91,7 @@ pub struct RegionInferenceContext<'tcx> {
|
|||
|
||||
/// Map closure bounds to a `Span` that should be used for error reporting.
|
||||
closure_bounds_mapping:
|
||||
FxHashMap<Location, FxHashMap<(RegionVid, RegionVid), (ConstraintCategory<'tcx>, Span)>>,
|
||||
FxHashMap<Location, FxHashMap<(RegionVid, RegionVid), (ConstraintCategory, Span)>>,
|
||||
|
||||
/// Map universe indexes to information on why we created it.
|
||||
universe_causes: FxHashMap<ty::UniverseIndex, UniverseInfo<'tcx>>,
|
||||
|
@ -267,7 +267,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
|||
member_constraints_in: MemberConstraintSet<'tcx, RegionVid>,
|
||||
closure_bounds_mapping: FxHashMap<
|
||||
Location,
|
||||
FxHashMap<(RegionVid, RegionVid), (ConstraintCategory<'tcx>, Span)>,
|
||||
FxHashMap<(RegionVid, RegionVid), (ConstraintCategory, Span)>,
|
||||
>,
|
||||
universe_causes: FxHashMap<ty::UniverseIndex, UniverseInfo<'tcx>>,
|
||||
type_tests: Vec<TypeTest<'tcx>>,
|
||||
|
@ -565,7 +565,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
|||
#[instrument(skip(self, infcx, body, polonius_output), level = "debug")]
|
||||
pub(super) fn solve(
|
||||
&mut self,
|
||||
infcx: &InferCtxt<'_, 'tcx>,
|
||||
infcx: &InferCtxt<'tcx>,
|
||||
param_env: ty::ParamEnv<'tcx>,
|
||||
body: &Body<'tcx>,
|
||||
polonius_output: Option<Rc<PoloniusOutput>>,
|
||||
|
@ -835,7 +835,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
|||
/// 'a`. See `TypeTest` for more details.
|
||||
fn check_type_tests(
|
||||
&self,
|
||||
infcx: &InferCtxt<'_, 'tcx>,
|
||||
infcx: &InferCtxt<'tcx>,
|
||||
param_env: ty::ParamEnv<'tcx>,
|
||||
body: &Body<'tcx>,
|
||||
mut propagated_outlives_requirements: Option<&mut Vec<ClosureOutlivesRequirement<'tcx>>>,
|
||||
|
@ -923,7 +923,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
|||
#[instrument(level = "debug", skip(self, infcx, propagated_outlives_requirements))]
|
||||
fn try_promote_type_test(
|
||||
&self,
|
||||
infcx: &InferCtxt<'_, 'tcx>,
|
||||
infcx: &InferCtxt<'tcx>,
|
||||
param_env: ty::ParamEnv<'tcx>,
|
||||
body: &Body<'tcx>,
|
||||
type_test: &TypeTest<'tcx>,
|
||||
|
@ -1036,7 +1036,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
|||
#[instrument(level = "debug", skip(self, infcx))]
|
||||
fn try_promote_type_test_subject(
|
||||
&self,
|
||||
infcx: &InferCtxt<'_, 'tcx>,
|
||||
infcx: &InferCtxt<'tcx>,
|
||||
ty: Ty<'tcx>,
|
||||
) -> Option<ClosureOutlivesSubject<'tcx>> {
|
||||
let tcx = infcx.tcx;
|
||||
|
@ -1212,7 +1212,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
|||
/// `point`.
|
||||
fn eval_verify_bound(
|
||||
&self,
|
||||
infcx: &InferCtxt<'_, 'tcx>,
|
||||
infcx: &InferCtxt<'tcx>,
|
||||
param_env: ty::ParamEnv<'tcx>,
|
||||
body: &Body<'tcx>,
|
||||
generic_ty: Ty<'tcx>,
|
||||
|
@ -1262,7 +1262,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
|||
|
||||
fn eval_if_eq(
|
||||
&self,
|
||||
infcx: &InferCtxt<'_, 'tcx>,
|
||||
infcx: &InferCtxt<'tcx>,
|
||||
param_env: ty::ParamEnv<'tcx>,
|
||||
generic_ty: Ty<'tcx>,
|
||||
lower_bound: RegionVid,
|
||||
|
@ -1398,7 +1398,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
|||
/// whether any of the constraints were too strong. In particular,
|
||||
/// we want to check for a case where a universally quantified
|
||||
/// region exceeded its bounds. Consider:
|
||||
/// ```compile_fail,E0312
|
||||
/// ```compile_fail
|
||||
/// fn foo<'a, 'b>(x: &'a u32) -> &'b u32 { x }
|
||||
/// ```
|
||||
/// In this case, returning `x` requires `&'a u32 <: &'b u32`
|
||||
|
@ -1451,7 +1451,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
|||
/// <https://smallcultfollowing.com/babysteps/blog/2019/01/17/polonius-and-region-errors/>
|
||||
///
|
||||
/// In the canonical example
|
||||
/// ```compile_fail,E0312
|
||||
/// ```compile_fail
|
||||
/// fn foo<'a, 'b>(x: &'a u32) -> &'b u32 { x }
|
||||
/// ```
|
||||
/// returning `x` requires `&'a u32 <: &'b u32` and hence we establish (transitively) a
|
||||
|
@ -1718,7 +1718,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
|||
|
||||
fn check_member_constraints(
|
||||
&self,
|
||||
infcx: &InferCtxt<'_, 'tcx>,
|
||||
infcx: &InferCtxt<'tcx>,
|
||||
errors_buffer: &mut RegionErrors<'tcx>,
|
||||
) {
|
||||
let member_constraints = self.member_constraints.clone();
|
||||
|
@ -1807,7 +1807,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
|||
pub(crate) fn retrieve_closure_constraint_info(
|
||||
&self,
|
||||
constraint: OutlivesConstraint<'tcx>,
|
||||
) -> Option<(ConstraintCategory<'tcx>, Span)> {
|
||||
) -> Option<(ConstraintCategory, Span)> {
|
||||
match constraint.locations {
|
||||
Locations::All(_) => None,
|
||||
Locations::Single(loc) => {
|
||||
|
@ -1822,7 +1822,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
|||
fr1: RegionVid,
|
||||
fr1_origin: NllRegionVariableOrigin,
|
||||
fr2: RegionVid,
|
||||
) -> (ConstraintCategory<'tcx>, ObligationCause<'tcx>) {
|
||||
) -> (ConstraintCategory, ObligationCause<'tcx>) {
|
||||
let BlameConstraint { category, cause, .. } = self
|
||||
.best_blame_constraint(fr1, fr1_origin, |r| self.provides_universal_region(r, fr1, fr2))
|
||||
.0;
|
||||
|
@@ -2236,6 +2236,27 @@ impl<'tcx> RegionInferenceContext<'tcx> {
pub(crate) fn universe_info(&self, universe: ty::UniverseIndex) -> UniverseInfo<'tcx> {
self.universe_causes[&universe].clone()
}

/// Tries to find the terminator of the loop in which the region 'r' resides.
/// Returns the location of the terminator if found.
pub(crate) fn find_loop_terminator_location(
&self,
r: RegionVid,
body: &Body<'_>,
) -> Option<Location> {
let scc = self.constraint_sccs.scc(r.to_region_vid());
let locations = self.scc_values.locations_outlived_by(scc);
for location in locations {
let bb = &body[location.block];
if let Some(terminator) = &bb.terminator {
// terminator of a loop should be TerminatorKind::FalseUnwind
if let TerminatorKind::FalseUnwind { .. } = terminator.kind {
return Some(location);
}
}
}
None
}
}

impl<'tcx> RegionDefinition<'tcx> {
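The new `find_loop_terminator_location` helper scans the locations outlived by a region's SCC and returns the first block whose terminator marks a loop back edge (`FalseUnwind`). A simplified, self-contained sketch of that scan follows; the types here are stand-ins for the real MIR structures, introduced only for illustration.

```rust
// Illustrative only: `Location`, `TerminatorKind`, `Block`, and `Body` are
// simplified stand-ins for the MIR structures used by the real function.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Location {
    block: usize,
}

enum TerminatorKind {
    Goto,
    FalseUnwind,
}

struct Block {
    terminator: Option<TerminatorKind>,
}

struct Body {
    blocks: Vec<Block>,
}

/// Return the first outlived location whose block ends in a loop terminator.
fn find_loop_terminator_location(outlived: &[Location], body: &Body) -> Option<Location> {
    outlived
        .iter()
        .copied()
        .find(|loc| matches!(body.blocks[loc.block].terminator, Some(TerminatorKind::FalseUnwind)))
}

fn main() {
    let body = Body {
        blocks: vec![
            Block { terminator: Some(TerminatorKind::Goto) },
            Block { terminator: Some(TerminatorKind::FalseUnwind) },
        ],
    };
    let outlived = [Location { block: 0 }, Location { block: 1 }];
    assert_eq!(
        find_loop_terminator_location(&outlived, &body),
        Some(Location { block: 1 })
    );
}
```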
@ -2341,7 +2362,7 @@ impl<'tcx> ClosureRegionRequirementsExt<'tcx> for ClosureRegionRequirements<'tcx
|
|||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct BlameConstraint<'tcx> {
|
||||
pub category: ConstraintCategory<'tcx>,
|
||||
pub category: ConstraintCategory,
|
||||
pub from_closure: bool,
|
||||
pub cause: ObligationCause<'tcx>,
|
||||
pub variance_info: ty::VarianceDiagInfo<'tcx>,
|
||||
|
|
|
@@ -2,22 +2,18 @@ use rustc_data_structures::fx::FxHashMap;
|
|||
use rustc_data_structures::vec_map::VecMap;
|
||||
use rustc_hir::def_id::LocalDefId;
|
||||
use rustc_hir::OpaqueTyOrigin;
|
||||
use rustc_infer::infer::error_reporting::unexpected_hidden_region_diagnostic;
|
||||
use rustc_infer::infer::TyCtxtInferExt as _;
|
||||
use rustc_infer::infer::{DefiningAnchor, InferCtxt};
|
||||
use rustc_infer::traits::{Obligation, ObligationCause, TraitEngine};
|
||||
use rustc_middle::ty::fold::{TypeFolder, TypeSuperFoldable};
|
||||
use rustc_middle::ty::subst::{GenericArg, GenericArgKind, InternalSubsts};
|
||||
use rustc_middle::ty::subst::{GenericArgKind, InternalSubsts};
|
||||
use rustc_middle::ty::visit::TypeVisitable;
|
||||
use rustc_middle::ty::{
|
||||
self, OpaqueHiddenType, OpaqueTypeKey, ToPredicate, Ty, TyCtxt, TypeFoldable,
|
||||
};
|
||||
use rustc_span::Span;
|
||||
use rustc_trait_selection::traits::error_reporting::InferCtxtExt as _;
|
||||
use rustc_trait_selection::traits::error_reporting::TypeErrCtxtExt as _;
|
||||
use rustc_trait_selection::traits::TraitEngineExt as _;
|
||||
|
||||
use crate::session_diagnostics::ConstNotUsedTraitAlias;
|
||||
|
||||
use super::RegionInferenceContext;
|
||||
|
||||
impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
|
@ -63,7 +59,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
|||
#[instrument(level = "debug", skip(self, infcx), ret)]
|
||||
pub(crate) fn infer_opaque_types(
|
||||
&self,
|
||||
infcx: &InferCtxt<'_, 'tcx>,
|
||||
infcx: &InferCtxt<'tcx>,
|
||||
opaque_ty_decls: VecMap<OpaqueTypeKey<'tcx>, (OpaqueHiddenType<'tcx>, OpaqueTyOrigin)>,
|
||||
) -> VecMap<LocalDefId, OpaqueHiddenType<'tcx>> {
|
||||
let mut result: VecMap<LocalDefId, OpaqueHiddenType<'tcx>> = VecMap::new();
|
||||
|
@ -194,7 +190,7 @@ pub trait InferCtxtExt<'tcx> {
|
|||
) -> Ty<'tcx>;
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
|
||||
/// Given the fully resolved, instantiated type for an opaque
|
||||
/// type, i.e., the value of an inference variable like C1 or C2
|
||||
/// (*), computes the "definition type" for an opaque type
|
||||
|
@@ -229,31 +225,9 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
|
|||
return self.tcx.ty_error();
|
||||
}
|
||||
|
||||
let OpaqueTypeKey { def_id, substs } = opaque_type_key;
|
||||
|
||||
// Use substs to build up a reverse map from regions to their
|
||||
// identity mappings. This is necessary because of `impl
|
||||
// Trait` lifetimes are computed by replacing existing
|
||||
// lifetimes with 'static and remapping only those used in the
|
||||
// `impl Trait` return type, resulting in the parameters
|
||||
// shifting.
|
||||
let id_substs = InternalSubsts::identity_for_item(self.tcx, def_id.to_def_id());
|
||||
debug!(?id_substs);
|
||||
let map: FxHashMap<GenericArg<'tcx>, GenericArg<'tcx>> =
|
||||
substs.iter().enumerate().map(|(index, subst)| (subst, id_substs[index])).collect();
|
||||
debug!("map = {:#?}", map);
|
||||
|
||||
// Convert the type from the function into a type valid outside
|
||||
// the function, by replacing invalid regions with 'static,
|
||||
// after producing an error for each of them.
|
||||
let definition_ty = instantiated_ty.ty.fold_with(&mut ReverseMapper::new(
|
||||
self.tcx,
|
||||
opaque_type_key,
|
||||
map,
|
||||
instantiated_ty.ty,
|
||||
instantiated_ty.span,
|
||||
));
|
||||
debug!(?definition_ty);
|
||||
let definition_ty = instantiated_ty
|
||||
.remap_generic_params_to_declaration_params(opaque_type_key, self.tcx, false, origin)
|
||||
.ty;
|
||||
|
||||
if !check_opaque_type_parameter_valid(
|
||||
self.tcx,
|
||||
|
@@ -266,72 +240,72 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
|
|||
|
||||
// Only check this for TAIT. RPIT already supports `src/test/ui/impl-trait/nested-return-type2.rs`
|
||||
// on stable and we'd break that.
|
||||
if let OpaqueTyOrigin::TyAlias = origin {
|
||||
// This logic duplicates most of `check_opaque_meets_bounds`.
|
||||
// FIXME(oli-obk): Also do region checks here and then consider removing `check_opaque_meets_bounds` entirely.
|
||||
let param_env = self.tcx.param_env(def_id);
|
||||
let body_id = self.tcx.local_def_id_to_hir_id(def_id);
|
||||
// HACK This bubble is required for this tests to pass:
|
||||
// type-alias-impl-trait/issue-67844-nested-opaque.rs
|
||||
self.tcx.infer_ctxt().with_opaque_type_inference(DefiningAnchor::Bubble).enter(
|
||||
move |infcx| {
|
||||
// Require the hidden type to be well-formed with only the generics of the opaque type.
|
||||
// Defining use functions may have more bounds than the opaque type, which is ok, as long as the
|
||||
// hidden type is well formed even without those bounds.
|
||||
let predicate =
|
||||
ty::Binder::dummy(ty::PredicateKind::WellFormed(definition_ty.into()))
|
||||
.to_predicate(infcx.tcx);
|
||||
let mut fulfillment_cx = <dyn TraitEngine<'tcx>>::new(infcx.tcx);
|
||||
let OpaqueTyOrigin::TyAlias = origin else {
|
||||
return definition_ty;
|
||||
};
|
||||
let def_id = opaque_type_key.def_id;
|
||||
// This logic duplicates most of `check_opaque_meets_bounds`.
|
||||
// FIXME(oli-obk): Also do region checks here and then consider removing `check_opaque_meets_bounds` entirely.
|
||||
let param_env = self.tcx.param_env(def_id);
|
||||
let body_id = self.tcx.local_def_id_to_hir_id(def_id);
|
||||
// HACK This bubble is required for this tests to pass:
|
||||
// type-alias-impl-trait/issue-67844-nested-opaque.rs
|
||||
let infcx =
|
||||
self.tcx.infer_ctxt().with_opaque_type_inference(DefiningAnchor::Bubble).build();
|
||||
// Require the hidden type to be well-formed with only the generics of the opaque type.
|
||||
// Defining use functions may have more bounds than the opaque type, which is ok, as long as the
|
||||
// hidden type is well formed even without those bounds.
|
||||
let predicate = ty::Binder::dummy(ty::PredicateKind::WellFormed(definition_ty.into()))
|
||||
.to_predicate(infcx.tcx);
|
||||
let mut fulfillment_cx = <dyn TraitEngine<'tcx>>::new(infcx.tcx);
|
||||
|
||||
// Require that the hidden type actually fulfills all the bounds of the opaque type, even without
|
||||
// the bounds that the function supplies.
|
||||
match infcx.register_hidden_type(
|
||||
OpaqueTypeKey { def_id, substs: id_substs },
|
||||
ObligationCause::misc(instantiated_ty.span, body_id),
|
||||
param_env,
|
||||
let id_substs = InternalSubsts::identity_for_item(self.tcx, def_id.to_def_id());
|
||||
|
||||
// Require that the hidden type actually fulfills all the bounds of the opaque type, even without
|
||||
// the bounds that the function supplies.
|
||||
match infcx.register_hidden_type(
|
||||
OpaqueTypeKey { def_id, substs: id_substs },
|
||||
ObligationCause::misc(instantiated_ty.span, body_id),
|
||||
param_env,
|
||||
definition_ty,
|
||||
origin,
|
||||
) {
|
||||
Ok(infer_ok) => {
|
||||
for obligation in infer_ok.obligations {
|
||||
fulfillment_cx.register_predicate_obligation(&infcx, obligation);
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
infcx
|
||||
.err_ctxt()
|
||||
.report_mismatched_types(
|
||||
&ObligationCause::misc(instantiated_ty.span, body_id),
|
||||
self.tcx.mk_opaque(def_id.to_def_id(), id_substs),
|
||||
definition_ty,
|
||||
origin,
|
||||
) {
|
||||
Ok(infer_ok) => {
|
||||
for obligation in infer_ok.obligations {
|
||||
fulfillment_cx.register_predicate_obligation(&infcx, obligation);
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
infcx
|
||||
.report_mismatched_types(
|
||||
&ObligationCause::misc(instantiated_ty.span, body_id),
|
||||
self.tcx.mk_opaque(def_id.to_def_id(), id_substs),
|
||||
definition_ty,
|
||||
err,
|
||||
)
|
||||
.emit();
|
||||
}
|
||||
}
|
||||
err,
|
||||
)
|
||||
.emit();
|
||||
}
|
||||
}
|
||||
|
||||
fulfillment_cx.register_predicate_obligation(
|
||||
&infcx,
|
||||
Obligation::misc(instantiated_ty.span, body_id, param_env, predicate),
|
||||
);
|
||||
fulfillment_cx.register_predicate_obligation(
|
||||
&infcx,
|
||||
Obligation::misc(instantiated_ty.span, body_id, param_env, predicate),
|
||||
);
|
||||
|
||||
// Check that all obligations are satisfied by the implementation's
|
||||
// version.
|
||||
let errors = fulfillment_cx.select_all_or_error(&infcx);
|
||||
// Check that all obligations are satisfied by the implementation's
|
||||
// version.
|
||||
let errors = fulfillment_cx.select_all_or_error(&infcx);
|
||||
|
||||
// This is still required for many(half of the tests in ui/type-alias-impl-trait)
|
||||
// tests to pass
|
||||
let _ = infcx.inner.borrow_mut().opaque_type_storage.take_opaque_types();
|
||||
// This is still required for many(half of the tests in ui/type-alias-impl-trait)
|
||||
// tests to pass
|
||||
let _ = infcx.inner.borrow_mut().opaque_type_storage.take_opaque_types();
|
||||
|
||||
if errors.is_empty() {
|
||||
definition_ty
|
||||
} else {
|
||||
infcx.report_fulfillment_errors(&errors, None, false);
|
||||
self.tcx.ty_error()
|
||||
}
|
||||
},
|
||||
)
|
||||
} else {
|
||||
if errors.is_empty() {
|
||||
definition_ty
|
||||
} else {
|
||||
infcx.err_ctxt().report_fulfillment_errors(&errors, None, false);
|
||||
self.tcx.ty_error()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
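
For readers of this hunk: the comments above describe a check that the hidden type is well-formed, and satisfies the opaque type's bounds, using only the opaque type's own generics. Below is a minimal sketch of the shapes involved; it is not taken from this diff, needs a nightly with `type_alias_impl_trait`, and exact diagnostics vary by compiler version.

#![feature(type_alias_impl_trait)]

// Accepted: the hidden type `Option<T>` is well-formed and `Sized` for any `T`,
// so it passes the check using only the alias's own generics.
type Wrapper<T> = impl Sized;

fn define<T>(x: T) -> Wrapper<T> {
    Some(x)
}

// Shape the check rejects (kept commented out): the hidden type `T` would only
// satisfy `Clone` because of the *function's* extra bound, which the alias lacks.
// type Cloned<T> = impl Clone;
// fn define_err<T: Clone>(x: T) -> Cloned<T> { x }

fn main() {
    let _ = define(1u8);
}
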
@ -427,221 +401,3 @@ fn check_opaque_type_parameter_valid(
|
|||
}
|
||||
true
|
||||
}
|
||||
|
||||
struct ReverseMapper<'tcx> {
|
||||
tcx: TyCtxt<'tcx>,
|
||||
|
||||
key: ty::OpaqueTypeKey<'tcx>,
|
||||
map: FxHashMap<GenericArg<'tcx>, GenericArg<'tcx>>,
|
||||
do_not_error: bool,
|
||||
|
||||
/// initially `Some`, set to `None` once error has been reported
|
||||
hidden_ty: Option<Ty<'tcx>>,
|
||||
|
||||
/// Span of function being checked.
|
||||
span: Span,
|
||||
}
|
||||
|
||||
impl<'tcx> ReverseMapper<'tcx> {
|
||||
fn new(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
key: ty::OpaqueTypeKey<'tcx>,
|
||||
map: FxHashMap<GenericArg<'tcx>, GenericArg<'tcx>>,
|
||||
hidden_ty: Ty<'tcx>,
|
||||
span: Span,
|
||||
) -> Self {
|
||||
Self { tcx, key, map, do_not_error: false, hidden_ty: Some(hidden_ty), span }
|
||||
}
|
||||
|
||||
fn fold_kind_no_missing_regions_error(&mut self, kind: GenericArg<'tcx>) -> GenericArg<'tcx> {
|
||||
assert!(!self.do_not_error);
|
||||
self.do_not_error = true;
|
||||
let kind = kind.fold_with(self);
|
||||
self.do_not_error = false;
|
||||
kind
|
||||
}
|
||||
|
||||
fn fold_kind_normally(&mut self, kind: GenericArg<'tcx>) -> GenericArg<'tcx> {
|
||||
assert!(!self.do_not_error);
|
||||
kind.fold_with(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> TypeFolder<'tcx> for ReverseMapper<'tcx> {
|
||||
fn tcx(&self) -> TyCtxt<'tcx> {
|
||||
self.tcx
|
||||
}
|
||||
|
||||
#[instrument(skip(self), level = "debug")]
|
||||
fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
|
||||
match *r {
|
||||
// Ignore bound regions and `'static` regions that appear in the
|
||||
// type, we only need to remap regions that reference lifetimes
|
||||
// from the function declaration.
|
||||
// This would ignore `'r` in a type like `for<'r> fn(&'r u32)`.
|
||||
ty::ReLateBound(..) | ty::ReStatic => return r,
|
||||
|
||||
// If regions have been erased (by writeback), don't try to unerase
|
||||
// them.
|
||||
ty::ReErased => return r,
|
||||
|
||||
// The regions that we expect from borrow checking.
|
||||
ty::ReEarlyBound(_) | ty::ReFree(_) => {}
|
||||
|
||||
ty::RePlaceholder(_) | ty::ReVar(_) => {
|
||||
// All of the regions in the type should either have been
|
||||
// erased by writeback, or mapped back to named regions by
|
||||
// borrow checking.
|
||||
bug!("unexpected region kind in opaque type: {:?}", r);
|
||||
}
|
||||
}
|
||||
|
||||
let generics = self.tcx().generics_of(self.key.def_id);
|
||||
match self.map.get(&r.into()).map(|k| k.unpack()) {
|
||||
Some(GenericArgKind::Lifetime(r1)) => r1,
|
||||
Some(u) => panic!("region mapped to unexpected kind: {:?}", u),
|
||||
None if self.do_not_error => self.tcx.lifetimes.re_static,
|
||||
None if generics.parent.is_some() => {
|
||||
if let Some(hidden_ty) = self.hidden_ty.take() {
|
||||
unexpected_hidden_region_diagnostic(
|
||||
self.tcx,
|
||||
self.tcx.def_span(self.key.def_id),
|
||||
hidden_ty,
|
||||
r,
|
||||
self.key,
|
||||
)
|
||||
.emit();
|
||||
}
|
||||
self.tcx.lifetimes.re_static
|
||||
}
|
||||
None => {
|
||||
self.tcx
|
||||
.sess
|
||||
.struct_span_err(self.span, "non-defining opaque type use in defining scope")
|
||||
.span_label(
|
||||
self.span,
|
||||
format!(
|
||||
"lifetime `{}` is part of concrete type but not used in \
|
||||
parameter list of the `impl Trait` type alias",
|
||||
r
|
||||
),
|
||||
)
|
||||
.emit();
|
||||
|
||||
self.tcx().lifetimes.re_static
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
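
For context on the `None` arm above (the "non-defining opaque type use" error for lifetimes), here is a hedged illustration of the two shapes. It is not taken from this diff and depends on the nightly's TAIT rules.

#![feature(type_alias_impl_trait)]

// Accepted: `'a` is in the alias's parameter list, so the region in the hidden type
// `&'a i32` has an entry in the reverse map and can be folded back to `'a`.
type Borrowed<'a> = impl Sized + 'a;

fn define<'a>(x: &'a i32) -> Borrowed<'a> {
    x
}

// Shape that hits the error above (commented out): the hidden type captures a
// lifetime that the alias's parameter list does not mention.
// type NoLifetime = impl Sized;
// fn define_err<'a>(x: &'a i32) -> NoLifetime { x }

fn main() {
    let v = 1;
    let _ = define(&v);
}
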
fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
|
||||
match *ty.kind() {
|
||||
ty::Closure(def_id, substs) => {
|
||||
// I am a horrible monster and I pray for death. When
|
||||
// we encounter a closure here, it is always a closure
|
||||
// from within the function that we are currently
|
||||
// type-checking -- one that is now being encapsulated
|
||||
// in an opaque type. Ideally, we would
|
||||
// go through the types/lifetimes that it references
|
||||
// and treat them just like we would any other type,
|
||||
// which means we would error out if we find any
|
||||
// reference to a type/region that is not in the
|
||||
// "reverse map".
|
||||
//
|
||||
// **However,** in the case of closures, there is a
|
||||
// somewhat subtle (read: hacky) consideration. The
|
||||
// problem is that our closure types currently include
|
||||
// all the lifetime parameters declared on the
|
||||
// enclosing function, even if they are unused by the
|
||||
// closure itself. We can't readily filter them out,
|
||||
// so here we replace those values with `'empty`. This
|
||||
// can't really make a difference to the rest of the
|
||||
// compiler; those regions are ignored for the
|
||||
// outlives relation, and hence don't affect trait
|
||||
// selection or auto traits, and they are erased
|
||||
// during codegen.
|
||||
|
||||
let generics = self.tcx.generics_of(def_id);
|
||||
let substs = self.tcx.mk_substs(substs.iter().enumerate().map(|(index, kind)| {
|
||||
if index < generics.parent_count {
|
||||
// Accommodate missing regions in the parent kinds...
|
||||
self.fold_kind_no_missing_regions_error(kind)
|
||||
} else {
|
||||
// ...but not elsewhere.
|
||||
self.fold_kind_normally(kind)
|
||||
}
|
||||
}));
|
||||
|
||||
self.tcx.mk_closure(def_id, substs)
|
||||
}
|
||||
|
||||
ty::Generator(def_id, substs, movability) => {
|
||||
let generics = self.tcx.generics_of(def_id);
|
||||
let substs = self.tcx.mk_substs(substs.iter().enumerate().map(|(index, kind)| {
|
||||
if index < generics.parent_count {
|
||||
// Accommodate missing regions in the parent kinds...
|
||||
self.fold_kind_no_missing_regions_error(kind)
|
||||
} else {
|
||||
// ...but not elsewhere.
|
||||
self.fold_kind_normally(kind)
|
||||
}
|
||||
}));
|
||||
|
||||
self.tcx.mk_generator(def_id, substs, movability)
|
||||
}
|
||||
|
||||
ty::Param(param) => {
|
||||
// Look it up in the substitution list.
|
||||
match self.map.get(&ty.into()).map(|k| k.unpack()) {
|
||||
// Found it in the substitution list; replace with the parameter from the
|
||||
// opaque type.
|
||||
Some(GenericArgKind::Type(t1)) => t1,
|
||||
Some(u) => panic!("type mapped to unexpected kind: {:?}", u),
|
||||
None => {
|
||||
debug!(?param, ?self.map);
|
||||
self.tcx
|
||||
.sess
|
||||
.struct_span_err(
|
||||
self.span,
|
||||
&format!(
|
||||
"type parameter `{}` is part of concrete type but not \
|
||||
used in parameter list for the `impl Trait` type alias",
|
||||
ty
|
||||
),
|
||||
)
|
||||
.emit();
|
||||
|
||||
self.tcx().ty_error()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_ => ty.super_fold_with(self),
|
||||
}
|
||||
}
|
||||
|
||||
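
Similarly for the `ty::Param` arm above, a sketch (not from this diff) of a defining use whose hidden type only mentions parameters the alias declares, versus one that would trip the "type parameter ... not used in parameter list" error.

#![feature(type_alias_impl_trait)]

// Accepted: the hidden type `(T, T)` only mentions `T`, which `Pair` declares,
// so every `ty::Param` in it has a mapping back to the alias's own parameters.
type Pair<T> = impl Sized;

fn define<T>(x: T, y: T) -> Pair<T> {
    (x, y)
}

// Shape that hits the error above (commented out): the hidden type would be `(T, U)`,
// and `U` has no counterpart in `Pair`'s parameter list.
// fn define_err<T, U>(x: T, y: U) -> Pair<T> { (x, y) }

fn main() {
    let _ = define(1, 2);
}
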
fn fold_const(&mut self, ct: ty::Const<'tcx>) -> ty::Const<'tcx> {
|
||||
trace!("checking const {:?}", ct);
|
||||
// Find a const parameter
|
||||
match ct.kind() {
|
||||
ty::ConstKind::Param(..) => {
|
||||
// Look it up in the substitution list.
|
||||
match self.map.get(&ct.into()).map(|k| k.unpack()) {
|
||||
// Found it in the substitution list, replace with the parameter from the
|
||||
// opaque type.
|
||||
Some(GenericArgKind::Const(c1)) => c1,
|
||||
Some(u) => panic!("const mapped to unexpected kind: {:?}", u),
|
||||
None => {
|
||||
self.tcx.sess.emit_err(ConstNotUsedTraitAlias {
|
||||
ct: ct.to_string(),
|
||||
span: self.span,
|
||||
});
|
||||
|
||||
self.tcx().const_error(ct.ty())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_ => ct,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
use rustc_index::vec::IndexVec;
|
||||
use rustc_infer::infer::{InferCtxt, NllRegionVariableOrigin};
|
||||
use rustc_middle::mir::visit::{MutVisitor, TyContext};
|
||||
use rustc_middle::mir::Constant;
|
||||
use rustc_middle::mir::{Body, Location, Promoted};
|
||||
use rustc_middle::mir::{Constant, ConstantKind};
|
||||
use rustc_middle::ty::subst::SubstsRef;
|
||||
use rustc_middle::ty::{self, Ty, TyCtxt, TypeFoldable};
|
||||
|
||||
|
@ -10,7 +10,7 @@ use rustc_middle::ty::{self, Ty, TyCtxt, TypeFoldable};
|
|||
/// inference variables, returning the number of variables created.
|
||||
#[instrument(skip(infcx, body, promoted), level = "debug")]
|
||||
pub fn renumber_mir<'tcx>(
|
||||
infcx: &InferCtxt<'_, 'tcx>,
|
||||
infcx: &InferCtxt<'tcx>,
|
||||
body: &mut Body<'tcx>,
|
||||
promoted: &mut IndexVec<Promoted, Body<'tcx>>,
|
||||
) {
|
||||
|
@ -28,7 +28,7 @@ pub fn renumber_mir<'tcx>(
|
|||
/// Replaces all regions appearing in `value` with fresh inference
|
||||
/// variables.
|
||||
#[instrument(skip(infcx), level = "debug")]
|
||||
pub fn renumber_regions<'tcx, T>(infcx: &InferCtxt<'_, 'tcx>, value: T) -> T
|
||||
pub fn renumber_regions<'tcx, T>(infcx: &InferCtxt<'tcx>, value: T) -> T
|
||||
where
|
||||
T: TypeFoldable<'tcx>,
|
||||
{
|
||||
|
@ -38,23 +38,8 @@ where
|
|||
})
|
||||
}
|
||||
|
||||
// FIXME(valtrees): This function is necessary because `fold_regions`
|
||||
// panics for mir constants in the visitor.
|
||||
//
|
||||
// Once `visit_mir_constant` is removed we can also remove this function
|
||||
// and just use `renumber_regions`.
|
||||
fn renumber_regions_in_mir_constant<'tcx>(
|
||||
infcx: &InferCtxt<'_, 'tcx>,
|
||||
value: ConstantKind<'tcx>,
|
||||
) -> ConstantKind<'tcx> {
|
||||
infcx.tcx.super_fold_regions(value, |_region, _depth| {
|
||||
let origin = NllRegionVariableOrigin::Existential { from_forall: false };
|
||||
infcx.next_nll_region_var(origin)
|
||||
})
|
||||
}
|
||||
|
||||
struct NllVisitor<'a, 'tcx> {
|
||||
infcx: &'a InferCtxt<'a, 'tcx>,
|
||||
infcx: &'a InferCtxt<'tcx>,
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> NllVisitor<'a, 'tcx> {
|
||||
|
@ -64,13 +49,6 @@ impl<'a, 'tcx> NllVisitor<'a, 'tcx> {
|
|||
{
|
||||
renumber_regions(self.infcx, value)
|
||||
}
|
||||
|
||||
fn renumber_regions_in_mir_constant(
|
||||
&mut self,
|
||||
value: ConstantKind<'tcx>,
|
||||
) -> ConstantKind<'tcx> {
|
||||
renumber_regions_in_mir_constant(self.infcx, value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> MutVisitor<'tcx> for NllVisitor<'a, 'tcx> {
|
||||
|
@ -103,7 +81,7 @@ impl<'a, 'tcx> MutVisitor<'tcx> for NllVisitor<'a, 'tcx> {
|
|||
#[instrument(skip(self), level = "debug")]
|
||||
fn visit_constant(&mut self, constant: &mut Constant<'tcx>, _location: Location) {
|
||||
let literal = constant.literal;
|
||||
constant.literal = self.renumber_regions_in_mir_constant(literal);
|
||||
constant.literal = self.renumber_regions(literal);
|
||||
debug!("constant: {:#?}", constant);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -52,15 +52,6 @@ pub(crate) struct VarNeedNotMut {
|
|||
#[suggestion_short(applicability = "machine-applicable", code = "")]
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(borrowck::const_not_used_in_type_alias)]
|
||||
pub(crate) struct ConstNotUsedTraitAlias {
|
||||
pub ct: String,
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(borrowck::var_cannot_escape_closure)]
|
||||
#[note]
|
||||
|
|
|
@ -28,7 +28,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
|||
pub(super) fn fully_perform_op<R: fmt::Debug, Op>(
|
||||
&mut self,
|
||||
locations: Locations,
|
||||
category: ConstraintCategory<'tcx>,
|
||||
category: ConstraintCategory,
|
||||
op: Op,
|
||||
) -> Fallible<R>
|
||||
where
|
||||
|
@ -52,11 +52,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
|||
Some(error_info) => error_info.to_universe_info(old_universe),
|
||||
None => UniverseInfo::other(),
|
||||
};
|
||||
for u in old_universe..universe {
|
||||
self.borrowck_context
|
||||
.constraints
|
||||
.universe_causes
|
||||
.insert(u + 1, universe_info.clone());
|
||||
for u in (old_universe + 1)..=universe {
|
||||
self.borrowck_context.constraints.universe_causes.insert(u, universe_info.clone());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -71,15 +68,13 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
|||
where
|
||||
T: TypeFoldable<'tcx>,
|
||||
{
|
||||
let old_universe = self.infcx.universe();
|
||||
|
||||
let (instantiated, _) =
|
||||
self.infcx.instantiate_canonical_with_fresh_inference_vars(span, canonical);
|
||||
|
||||
for u in 0..canonical.max_universe.as_u32() {
|
||||
let info = UniverseInfo::other();
|
||||
self.borrowck_context
|
||||
.constraints
|
||||
.universe_causes
|
||||
.insert(ty::UniverseIndex::from_u32(u), info);
|
||||
for u in (old_universe + 1)..=self.infcx.universe() {
|
||||
self.borrowck_context.constraints.universe_causes.insert(u, UniverseInfo::other());
|
||||
}
|
||||
|
||||
instantiated
|
||||
|
@ -90,7 +85,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
|||
&mut self,
|
||||
trait_ref: ty::TraitRef<'tcx>,
|
||||
locations: Locations,
|
||||
category: ConstraintCategory<'tcx>,
|
||||
category: ConstraintCategory,
|
||||
) {
|
||||
self.prove_predicate(
|
||||
ty::Binder::dummy(ty::PredicateKind::Trait(ty::TraitPredicate {
|
||||
|
@ -129,7 +124,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
|||
&mut self,
|
||||
predicates: impl IntoIterator<Item = impl ToPredicate<'tcx>>,
|
||||
locations: Locations,
|
||||
category: ConstraintCategory<'tcx>,
|
||||
category: ConstraintCategory,
|
||||
) {
|
||||
for predicate in predicates {
|
||||
let predicate = predicate.to_predicate(self.tcx());
|
||||
|
@ -144,7 +139,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
|||
&mut self,
|
||||
predicate: ty::Predicate<'tcx>,
|
||||
locations: Locations,
|
||||
category: ConstraintCategory<'tcx>,
|
||||
category: ConstraintCategory,
|
||||
) {
|
||||
let param_env = self.param_env;
|
||||
self.fully_perform_op(
|
||||
|
@ -169,7 +164,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
|||
&mut self,
|
||||
value: T,
|
||||
location: impl NormalizeLocation,
|
||||
category: ConstraintCategory<'tcx>,
|
||||
category: ConstraintCategory,
|
||||
) -> T
|
||||
where
|
||||
T: type_op::normalize::Normalizable<'tcx> + fmt::Display + Copy + 'tcx,
|
||||
|
|
|
@ -19,7 +19,7 @@ use crate::{
|
|||
};
|
||||
|
||||
pub(crate) struct ConstraintConversion<'a, 'tcx> {
|
||||
infcx: &'a InferCtxt<'a, 'tcx>,
|
||||
infcx: &'a InferCtxt<'tcx>,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
universal_regions: &'a UniversalRegions<'tcx>,
|
||||
/// Each RBP `GK: 'a` is assumed to be true. These encode
|
||||
|
@ -37,20 +37,20 @@ pub(crate) struct ConstraintConversion<'a, 'tcx> {
|
|||
param_env: ty::ParamEnv<'tcx>,
|
||||
locations: Locations,
|
||||
span: Span,
|
||||
category: ConstraintCategory<'tcx>,
|
||||
category: ConstraintCategory,
|
||||
constraints: &'a mut MirTypeckRegionConstraints<'tcx>,
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
|
||||
pub(crate) fn new(
|
||||
infcx: &'a InferCtxt<'a, 'tcx>,
|
||||
infcx: &'a InferCtxt<'tcx>,
|
||||
universal_regions: &'a UniversalRegions<'tcx>,
|
||||
region_bound_pairs: &'a RegionBoundPairs<'tcx>,
|
||||
implicit_region_bound: ty::Region<'tcx>,
|
||||
param_env: ty::ParamEnv<'tcx>,
|
||||
locations: Locations,
|
||||
span: Span,
|
||||
category: ConstraintCategory<'tcx>,
|
||||
category: ConstraintCategory,
|
||||
constraints: &'a mut MirTypeckRegionConstraints<'tcx>,
|
||||
) -> Self {
|
||||
Self {
|
||||
|
@ -175,7 +175,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
|
|||
&mut self,
|
||||
sup: ty::RegionVid,
|
||||
sub: ty::RegionVid,
|
||||
category: ConstraintCategory<'tcx>,
|
||||
category: ConstraintCategory,
|
||||
) {
|
||||
let category = match self.category {
|
||||
ConstraintCategory::Boring | ConstraintCategory::BoringNoLocation => category,
|
||||
|
@ -203,7 +203,7 @@ impl<'a, 'b, 'tcx> TypeOutlivesDelegate<'tcx> for &'a mut ConstraintConversion<'
|
|||
_origin: SubregionOrigin<'tcx>,
|
||||
a: ty::Region<'tcx>,
|
||||
b: ty::Region<'tcx>,
|
||||
constraint_category: ConstraintCategory<'tcx>,
|
||||
constraint_category: ConstraintCategory,
|
||||
) {
|
||||
let b = self.to_region_vid(b);
|
||||
let a = self.to_region_vid(a);
|
||||
|
|
|
@ -8,7 +8,6 @@ use rustc_infer::infer::InferCtxt;
|
|||
use rustc_middle::mir::ConstraintCategory;
|
||||
use rustc_middle::traits::query::OutlivesBound;
|
||||
use rustc_middle::ty::{self, RegionVid, Ty};
|
||||
use rustc_span::DUMMY_SP;
|
||||
use rustc_trait_selection::traits::query::type_op::{self, TypeOp};
|
||||
use std::rc::Rc;
|
||||
use type_op::TypeOpOutput;
|
||||
|
@ -48,7 +47,7 @@ pub(crate) struct CreateResult<'tcx> {
|
|||
}
|
||||
|
||||
pub(crate) fn create<'tcx>(
|
||||
infcx: &InferCtxt<'_, 'tcx>,
|
||||
infcx: &InferCtxt<'tcx>,
|
||||
param_env: ty::ParamEnv<'tcx>,
|
||||
implicit_region_bound: ty::Region<'tcx>,
|
||||
universal_regions: &Rc<UniversalRegions<'tcx>>,
|
||||
|
@ -197,7 +196,7 @@ impl UniversalRegionRelations<'_> {
|
|||
}
|
||||
|
||||
struct UniversalRegionRelationsBuilder<'this, 'tcx> {
|
||||
infcx: &'this InferCtxt<'this, 'tcx>,
|
||||
infcx: &'this InferCtxt<'tcx>,
|
||||
param_env: ty::ParamEnv<'tcx>,
|
||||
universal_regions: Rc<UniversalRegions<'tcx>>,
|
||||
implicit_region_bound: ty::Region<'tcx>,
|
||||
|
@ -219,6 +218,7 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> {
|
|||
}
|
||||
|
||||
pub(crate) fn create(mut self) -> CreateResult<'tcx> {
|
||||
let span = self.infcx.tcx.def_span(self.universal_regions.defining_ty.def_id());
|
||||
let unnormalized_input_output_tys = self
|
||||
.universal_regions
|
||||
.unnormalized_input_tys
|
||||
|
@ -250,7 +250,7 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> {
|
|||
self.infcx
|
||||
.tcx
|
||||
.sess
|
||||
.delay_span_bug(DUMMY_SP, &format!("failed to normalize {:?}", ty));
|
||||
.delay_span_bug(span, &format!("failed to normalize {:?}", ty));
|
||||
TypeOpOutput {
|
||||
output: self.infcx.tcx.ty_error(),
|
||||
constraints: None,
|
||||
|
@ -301,8 +301,8 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> {
|
|||
&self.region_bound_pairs,
|
||||
self.implicit_region_bound,
|
||||
self.param_env,
|
||||
Locations::All(DUMMY_SP),
|
||||
DUMMY_SP,
|
||||
Locations::All(span),
|
||||
span,
|
||||
ConstraintCategory::Internal,
|
||||
&mut self.constraints,
|
||||
)
|
||||
|
@ -362,6 +362,11 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> {
|
|||
self.region_bound_pairs
|
||||
.insert(ty::OutlivesPredicate(GenericKind::Projection(projection_b), r_a));
|
||||
}
|
||||
|
||||
OutlivesBound::RegionSubOpaque(r_a, def_id, substs) => {
|
||||
self.region_bound_pairs
|
||||
.insert(ty::OutlivesPredicate(GenericKind::Opaque(def_id, substs), r_a));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -7,16 +7,11 @@
|
|||
//! `RETURN_PLACE` the MIR arguments) are always fully normalized (and
|
||||
//! contain revealed `impl Trait` values).
|
||||
|
||||
use crate::type_check::constraint_conversion::ConstraintConversion;
|
||||
use rustc_index::vec::Idx;
|
||||
use rustc_infer::infer::LateBoundRegionConversionTime;
|
||||
use rustc_middle::mir::*;
|
||||
use rustc_middle::ty::Ty;
|
||||
use rustc_span::Span;
|
||||
use rustc_span::DUMMY_SP;
|
||||
use rustc_trait_selection::traits::query::type_op::{self, TypeOp};
|
||||
use rustc_trait_selection::traits::query::Fallible;
|
||||
use type_op::TypeOpOutput;
|
||||
|
||||
use crate::universal_regions::UniversalRegions;
|
||||
|
||||
|
@ -185,7 +180,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
#[instrument(skip(self, span), level = "debug")]
|
||||
#[instrument(skip(self), level = "debug")]
|
||||
fn equate_normalized_input_or_output(&mut self, a: Ty<'tcx>, b: Ty<'tcx>, span: Span) {
|
||||
if let Err(_) =
|
||||
self.eq_types(a, b, Locations::All(span), ConstraintCategory::BoringNoLocation)
|
||||
|
@ -194,13 +189,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
|||
// `rustc_traits::normalize_after_erasing_regions`. Ideally, we'd
|
||||
// like to normalize *before* inserting into `local_decls`, but
|
||||
// doing so ends up causing some other trouble.
|
||||
let b = match self.normalize_and_add_constraints(b) {
|
||||
Ok(n) => n,
|
||||
Err(_) => {
|
||||
debug!("equate_inputs_and_outputs: NoSolution");
|
||||
b
|
||||
}
|
||||
};
|
||||
let b = self.normalize(b, Locations::All(span));
|
||||
|
||||
// Note: if we have to introduce new placeholders during normalization above, then we won't have
|
||||
// added those universes to the universe info, which we would want in `relate_tys`.
|
||||
|
@ -218,28 +207,4 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn normalize_and_add_constraints(&mut self, t: Ty<'tcx>) -> Fallible<Ty<'tcx>> {
|
||||
let TypeOpOutput { output: norm_ty, constraints, .. } =
|
||||
self.param_env.and(type_op::normalize::Normalize::new(t)).fully_perform(self.infcx)?;
|
||||
|
||||
debug!("{:?} normalized to {:?}", t, norm_ty);
|
||||
|
||||
for data in constraints {
|
||||
ConstraintConversion::new(
|
||||
self.infcx,
|
||||
&self.borrowck_context.universal_regions,
|
||||
&self.region_bound_pairs,
|
||||
self.implicit_region_bound,
|
||||
self.param_env,
|
||||
Locations::All(DUMMY_SP),
|
||||
DUMMY_SP,
|
||||
ConstraintCategory::Internal,
|
||||
&mut self.borrowck_context.constraints,
|
||||
)
|
||||
.convert_all(&*data);
|
||||
}
|
||||
|
||||
Ok(norm_ty)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -123,7 +123,7 @@ mod relate_tys;
|
|||
/// - `move_data` -- move-data constructed when performing the maybe-init dataflow analysis
|
||||
/// - `elements` -- MIR region map
|
||||
pub(crate) fn type_check<'mir, 'tcx>(
|
||||
infcx: &InferCtxt<'_, 'tcx>,
|
||||
infcx: &InferCtxt<'tcx>,
|
||||
param_env: ty::ParamEnv<'tcx>,
|
||||
body: &Body<'tcx>,
|
||||
promoted: &IndexVec<Promoted, Body<'tcx>>,
|
||||
|
@ -138,8 +138,6 @@ pub(crate) fn type_check<'mir, 'tcx>(
|
|||
use_polonius: bool,
|
||||
) -> MirTypeckResults<'tcx> {
|
||||
let implicit_region_bound = infcx.tcx.mk_region(ty::ReVar(universal_regions.fr_fn_body));
|
||||
let mut universe_causes = FxHashMap::default();
|
||||
universe_causes.insert(ty::UniverseIndex::from_u32(0), UniverseInfo::other());
|
||||
let mut constraints = MirTypeckRegionConstraints {
|
||||
placeholder_indices: PlaceholderIndices::default(),
|
||||
placeholder_index_to_region: IndexVec::default(),
|
||||
|
@ -148,7 +146,7 @@ pub(crate) fn type_check<'mir, 'tcx>(
|
|||
member_constraints: MemberConstraintSet::default(),
|
||||
closure_bounds_mapping: Default::default(),
|
||||
type_tests: Vec::default(),
|
||||
universe_causes,
|
||||
universe_causes: FxHashMap::default(),
|
||||
};
|
||||
|
||||
let CreateResult {
|
||||
|
@ -165,9 +163,8 @@ pub(crate) fn type_check<'mir, 'tcx>(
|
|||
|
||||
debug!(?normalized_inputs_and_output);
|
||||
|
||||
for u in ty::UniverseIndex::ROOT..infcx.universe() {
|
||||
let info = UniverseInfo::other();
|
||||
constraints.universe_causes.insert(u, info);
|
||||
for u in ty::UniverseIndex::ROOT..=infcx.universe() {
|
||||
constraints.universe_causes.insert(u, UniverseInfo::other());
|
||||
}
|
||||
|
||||
let mut borrowck_context = BorrowCheckContext {
|
||||
|
@ -236,7 +233,7 @@ pub(crate) fn type_check<'mir, 'tcx>(
|
|||
.unwrap();
|
||||
let mut hidden_type = infcx.resolve_vars_if_possible(decl.hidden_type);
|
||||
trace!("finalized opaque type {:?} to {:#?}", opaque_type_key, hidden_type.ty.kind());
|
||||
if hidden_type.has_infer_types_or_consts() {
|
||||
if hidden_type.has_non_region_infer() {
|
||||
infcx.tcx.sess.delay_span_bug(
|
||||
decl.hidden_type.span,
|
||||
&format!("could not resolve {:#?}", hidden_type.ty.kind()),
|
||||
|
@ -587,6 +584,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
|
|||
// modify their locations.
|
||||
let all_facts = &mut None;
|
||||
let mut constraints = Default::default();
|
||||
let mut type_tests = Default::default();
|
||||
let mut closure_bounds = Default::default();
|
||||
let mut liveness_constraints =
|
||||
LivenessValues::new(Rc::new(RegionValueElements::new(&promoted_body)));
|
||||
|
@ -598,6 +596,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
|
|||
&mut this.cx.borrowck_context.constraints.outlives_constraints,
|
||||
&mut constraints,
|
||||
);
|
||||
mem::swap(&mut this.cx.borrowck_context.constraints.type_tests, &mut type_tests);
|
||||
mem::swap(
|
||||
&mut this.cx.borrowck_context.constraints.closure_bounds_mapping,
|
||||
&mut closure_bounds,
|
||||
|
@ -622,6 +621,13 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
|
|||
swap_constraints(self);
|
||||
|
||||
let locations = location.to_locations();
|
||||
|
||||
// Use location of promoted const in collected constraints
|
||||
for type_test in type_tests.iter() {
|
||||
let mut type_test = type_test.clone();
|
||||
type_test.locations = locations;
|
||||
self.cx.borrowck_context.constraints.type_tests.push(type_test)
|
||||
}
|
||||
for constraint in constraints.outlives().iter() {
|
||||
let mut constraint = constraint.clone();
|
||||
constraint.locations = locations;
|
||||
|
@ -876,7 +882,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
|
|||
/// way, it accrues region constraints -- these can later be used by
|
||||
/// NLL region checking.
|
||||
struct TypeChecker<'a, 'tcx> {
|
||||
infcx: &'a InferCtxt<'a, 'tcx>,
|
||||
infcx: &'a InferCtxt<'tcx>,
|
||||
param_env: ty::ParamEnv<'tcx>,
|
||||
last_span: Span,
|
||||
body: &'a Body<'tcx>,
|
||||
|
@ -936,7 +942,7 @@ pub(crate) struct MirTypeckRegionConstraints<'tcx> {
|
|||
pub(crate) member_constraints: MemberConstraintSet<'tcx, RegionVid>,
|
||||
|
||||
pub(crate) closure_bounds_mapping:
|
||||
FxHashMap<Location, FxHashMap<(RegionVid, RegionVid), (ConstraintCategory<'tcx>, Span)>>,
|
||||
FxHashMap<Location, FxHashMap<(RegionVid, RegionVid), (ConstraintCategory, Span)>>,
|
||||
|
||||
pub(crate) universe_causes: FxHashMap<ty::UniverseIndex, UniverseInfo<'tcx>>,
|
||||
|
||||
|
@ -946,7 +952,7 @@ pub(crate) struct MirTypeckRegionConstraints<'tcx> {
|
|||
impl<'tcx> MirTypeckRegionConstraints<'tcx> {
|
||||
fn placeholder_region(
|
||||
&mut self,
|
||||
infcx: &InferCtxt<'_, 'tcx>,
|
||||
infcx: &InferCtxt<'tcx>,
|
||||
placeholder: ty::PlaceholderRegion,
|
||||
) -> ty::Region<'tcx> {
|
||||
let placeholder_index = self.placeholder_indices.insert(placeholder);
|
||||
|
@ -1030,7 +1036,7 @@ impl Locations {
|
|||
|
||||
impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
fn new(
|
||||
infcx: &'a InferCtxt<'a, 'tcx>,
|
||||
infcx: &'a InferCtxt<'tcx>,
|
||||
body: &'a Body<'tcx>,
|
||||
param_env: ty::ParamEnv<'tcx>,
|
||||
region_bound_pairs: &'a RegionBoundPairs<'tcx>,
|
||||
|
@ -1127,7 +1133,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
|||
fn push_region_constraints(
|
||||
&mut self,
|
||||
locations: Locations,
|
||||
category: ConstraintCategory<'tcx>,
|
||||
category: ConstraintCategory,
|
||||
data: &QueryRegionConstraints<'tcx>,
|
||||
) {
|
||||
debug!("constraints generated: {:#?}", data);
|
||||
|
@ -1152,7 +1158,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
|||
sub: Ty<'tcx>,
|
||||
sup: Ty<'tcx>,
|
||||
locations: Locations,
|
||||
category: ConstraintCategory<'tcx>,
|
||||
category: ConstraintCategory,
|
||||
) -> Fallible<()> {
|
||||
// Use this order of parameters because the sup type is usually the
|
||||
// "expected" type in diagnostics.
|
||||
|
@ -1165,7 +1171,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
|||
expected: Ty<'tcx>,
|
||||
found: Ty<'tcx>,
|
||||
locations: Locations,
|
||||
category: ConstraintCategory<'tcx>,
|
||||
category: ConstraintCategory,
|
||||
) -> Fallible<()> {
|
||||
self.relate_types(expected, ty::Variance::Invariant, found, locations, category)
|
||||
}
|
||||
|
@ -1177,7 +1183,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
|||
v: ty::Variance,
|
||||
user_ty: &UserTypeProjection,
|
||||
locations: Locations,
|
||||
category: ConstraintCategory<'tcx>,
|
||||
category: ConstraintCategory,
|
||||
) -> Fallible<()> {
|
||||
let annotated_type = self.user_type_annotations[user_ty.base].inferred_ty;
|
||||
let mut curr_projected_ty = PlaceTy::from_ty(annotated_type);
|
||||
|
@ -1612,19 +1618,12 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
|||
span_mirbug!(self, term, "call to {:?} with wrong # of args", sig);
|
||||
}
|
||||
|
||||
let func_ty = if let TerminatorKind::Call { func, .. } = &term.kind {
|
||||
Some(func.ty(body, self.infcx.tcx))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
debug!(?func_ty);
|
||||
|
||||
for (n, (fn_arg, op_arg)) in iter::zip(sig.inputs(), args).enumerate() {
|
||||
let op_arg_ty = op_arg.ty(body, self.tcx());
|
||||
|
||||
let op_arg_ty = self.normalize(op_arg_ty, term_location);
|
||||
let category = if from_hir_call {
|
||||
ConstraintCategory::CallArgument(func_ty)
|
||||
ConstraintCategory::CallArgument(term_location)
|
||||
} else {
|
||||
ConstraintCategory::Boring
|
||||
};
|
||||
|
@ -2209,25 +2208,104 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
CastKind::Misc => {
|
||||
CastKind::IntToInt => {
|
||||
let ty_from = op.ty(body, tcx);
|
||||
let cast_ty_from = CastTy::from_ty(ty_from);
|
||||
let cast_ty_to = CastTy::from_ty(*ty);
|
||||
// Misc casts are either between floats and ints, or one ptr type to another.
|
||||
match (cast_ty_from, cast_ty_to) {
|
||||
(
|
||||
Some(CastTy::Int(_) | CastTy::Float),
|
||||
Some(CastTy::Int(_) | CastTy::Float),
|
||||
)
|
||||
| (Some(CastTy::Ptr(_) | CastTy::FnPtr), Some(CastTy::Ptr(_))) => (),
|
||||
(Some(CastTy::Int(_)), Some(CastTy::Int(_))) => (),
|
||||
_ => {
|
||||
span_mirbug!(
|
||||
self,
|
||||
rvalue,
|
||||
"Invalid Misc cast {:?} -> {:?}",
|
||||
"Invalid IntToInt cast {:?} -> {:?}",
|
||||
ty_from,
|
||||
ty,
|
||||
ty
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
CastKind::IntToFloat => {
|
||||
let ty_from = op.ty(body, tcx);
|
||||
let cast_ty_from = CastTy::from_ty(ty_from);
|
||||
let cast_ty_to = CastTy::from_ty(*ty);
|
||||
match (cast_ty_from, cast_ty_to) {
|
||||
(Some(CastTy::Int(_)), Some(CastTy::Float)) => (),
|
||||
_ => {
|
||||
span_mirbug!(
|
||||
self,
|
||||
rvalue,
|
||||
"Invalid IntToFloat cast {:?} -> {:?}",
|
||||
ty_from,
|
||||
ty
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
CastKind::FloatToInt => {
|
||||
let ty_from = op.ty(body, tcx);
|
||||
let cast_ty_from = CastTy::from_ty(ty_from);
|
||||
let cast_ty_to = CastTy::from_ty(*ty);
|
||||
match (cast_ty_from, cast_ty_to) {
|
||||
(Some(CastTy::Float), Some(CastTy::Int(_))) => (),
|
||||
_ => {
|
||||
span_mirbug!(
|
||||
self,
|
||||
rvalue,
|
||||
"Invalid FloatToInt cast {:?} -> {:?}",
|
||||
ty_from,
|
||||
ty
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
CastKind::FloatToFloat => {
|
||||
let ty_from = op.ty(body, tcx);
|
||||
let cast_ty_from = CastTy::from_ty(ty_from);
|
||||
let cast_ty_to = CastTy::from_ty(*ty);
|
||||
match (cast_ty_from, cast_ty_to) {
|
||||
(Some(CastTy::Float), Some(CastTy::Float)) => (),
|
||||
_ => {
|
||||
span_mirbug!(
|
||||
self,
|
||||
rvalue,
|
||||
"Invalid FloatToFloat cast {:?} -> {:?}",
|
||||
ty_from,
|
||||
ty
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
CastKind::FnPtrToPtr => {
|
||||
let ty_from = op.ty(body, tcx);
|
||||
let cast_ty_from = CastTy::from_ty(ty_from);
|
||||
let cast_ty_to = CastTy::from_ty(*ty);
|
||||
match (cast_ty_from, cast_ty_to) {
|
||||
(Some(CastTy::FnPtr), Some(CastTy::Ptr(_))) => (),
|
||||
_ => {
|
||||
span_mirbug!(
|
||||
self,
|
||||
rvalue,
|
||||
"Invalid FnPtrToPtr cast {:?} -> {:?}",
|
||||
ty_from,
|
||||
ty
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
CastKind::PtrToPtr => {
|
||||
let ty_from = op.ty(body, tcx);
|
||||
let cast_ty_from = CastTy::from_ty(ty_from);
|
||||
let cast_ty_to = CastTy::from_ty(*ty);
|
||||
match (cast_ty_from, cast_ty_to) {
|
||||
(Some(CastTy::Ptr(_)), Some(CastTy::Ptr(_))) => (),
|
||||
_ => {
|
||||
span_mirbug!(
|
||||
self,
|
||||
rvalue,
|
||||
"Invalid PtrToPtr cast {:?} -> {:?}",
|
||||
ty_from,
|
||||
ty
|
||||
)
|
||||
}
|
||||
}
|
||||
|
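
The new `CastKind` arms above each validate one class of `as` cast that previously fell under `Misc`. A runnable, surface-level illustration of those classes follows; the mapping from source casts to MIR `CastKind`s in the comments is my sketch, not taken from the diff.

fn main() {
    let a: i64 = 42i32 as i64;          // IntToInt
    let b: f64 = 7u8 as f64;            // IntToFloat
    let c: u32 = 3.9f32 as u32;         // FloatToInt (saturating as-cast)
    let d: f32 = 1.5f64 as f32;         // FloatToFloat
    let f: fn() = main;
    let p: *const () = f as *const ();  // FnPtrToPtr
    let q: *const u8 = &1u8 as *const u8;
    let r: *const i8 = q as *const i8;  // PtrToPtr
    println!("{a} {b} {c} {d} {p:?} {r:?}");
}
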
@ -2744,7 +2822,7 @@ impl<'tcx> TypeOp<'tcx> for InstantiateOpaqueType<'tcx> {
|
|||
/// constraints in our `InferCtxt`
|
||||
type ErrorInfo = InstantiateOpaqueType<'tcx>;
|
||||
|
||||
fn fully_perform(mut self, infcx: &InferCtxt<'_, 'tcx>) -> Fallible<TypeOpOutput<'tcx, Self>> {
|
||||
fn fully_perform(mut self, infcx: &InferCtxt<'tcx>) -> Fallible<TypeOpOutput<'tcx, Self>> {
|
||||
let (mut output, region_constraints) = scrape_region_constraints(infcx, || {
|
||||
Ok(InferOk { value: (), obligations: self.obligations.clone() })
|
||||
})?;
|
||||
|
|
|
@ -28,7 +28,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
|||
v: ty::Variance,
|
||||
b: Ty<'tcx>,
|
||||
locations: Locations,
|
||||
category: ConstraintCategory<'tcx>,
|
||||
category: ConstraintCategory,
|
||||
) -> Fallible<()> {
|
||||
TypeRelating::new(
|
||||
self.infcx,
|
||||
|
@ -45,7 +45,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
|||
a: ty::SubstsRef<'tcx>,
|
||||
b: ty::SubstsRef<'tcx>,
|
||||
locations: Locations,
|
||||
category: ConstraintCategory<'tcx>,
|
||||
category: ConstraintCategory,
|
||||
) -> Fallible<()> {
|
||||
TypeRelating::new(
|
||||
self.infcx,
|
||||
|
@ -64,7 +64,7 @@ struct NllTypeRelatingDelegate<'me, 'bccx, 'tcx> {
|
|||
locations: Locations,
|
||||
|
||||
/// What category do we assign the resulting `'a: 'b` relationships?
|
||||
category: ConstraintCategory<'tcx>,
|
||||
category: ConstraintCategory,
|
||||
|
||||
/// Information so that error reporting knows what types we are relating
|
||||
/// when reporting a bound region error.
|
||||
|
@ -75,7 +75,7 @@ impl<'me, 'bccx, 'tcx> NllTypeRelatingDelegate<'me, 'bccx, 'tcx> {
|
|||
fn new(
|
||||
type_checker: &'me mut TypeChecker<'bccx, 'tcx>,
|
||||
locations: Locations,
|
||||
category: ConstraintCategory<'tcx>,
|
||||
category: ConstraintCategory,
|
||||
universe_info: UniverseInfo<'tcx>,
|
||||
) -> Self {
|
||||
Self { type_checker, locations, category, universe_info }
|
||||
|
|
|
@ -219,7 +219,7 @@ impl<'tcx> UniversalRegions<'tcx> {
|
|||
/// signature. This will also compute the relationships that are
|
||||
/// known between those regions.
|
||||
pub fn new(
|
||||
infcx: &InferCtxt<'_, 'tcx>,
|
||||
infcx: &InferCtxt<'tcx>,
|
||||
mir_def: ty::WithOptConstParam<LocalDefId>,
|
||||
param_env: ty::ParamEnv<'tcx>,
|
||||
) -> Self {
|
||||
|
@ -382,7 +382,7 @@ impl<'tcx> UniversalRegions<'tcx> {
|
|||
}
|
||||
|
||||
struct UniversalRegionsBuilder<'cx, 'tcx> {
|
||||
infcx: &'cx InferCtxt<'cx, 'tcx>,
|
||||
infcx: &'cx InferCtxt<'tcx>,
|
||||
mir_def: ty::WithOptConstParam<LocalDefId>,
|
||||
mir_hir_id: HirId,
|
||||
param_env: ty::ParamEnv<'tcx>,
|
||||
|
@ -414,7 +414,7 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
|
|||
|
||||
let typeck_root_def_id = self.infcx.tcx.typeck_root_def_id(self.mir_def.did.to_def_id());
|
||||
|
||||
// If this is is a 'root' body (not a closure/generator/inline const), then
|
||||
// If this is a 'root' body (not a closure/generator/inline const), then
|
||||
// there are no extern regions, so the local regions start at the same
|
||||
// position as the (empty) sub-list of extern regions
|
||||
let first_local_index = if self.mir_def.did.to_def_id() == typeck_root_def_id {
|
||||
|
@ -699,7 +699,7 @@ trait InferCtxtExt<'tcx> {
|
|||
);
|
||||
}
|
||||
|
||||
impl<'cx, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'cx, 'tcx> {
|
||||
impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
|
||||
fn replace_free_regions_with_nll_infer_vars<T>(
|
||||
&self,
|
||||
origin: NllRegionVariableOrigin,
|
||||
|
|
|
@ -58,6 +58,7 @@ impl<'cx, 'a> Context<'cx, 'a> {
|
|||
/// Builds the whole `assert!` expression. For example, `let elem = 1; assert!(elem == 1);` expands to:
|
||||
///
|
||||
/// ```rust
|
||||
/// #![feature(generic_assert_internals)]
|
||||
/// let elem = 1;
|
||||
/// {
|
||||
/// #[allow(unused_imports)]
|
||||
|
@ -70,7 +71,7 @@ impl<'cx, 'a> Context<'cx, 'a> {
|
|||
/// __local_bind0
|
||||
/// } == 1
|
||||
/// ) {
|
||||
/// panic!("Assertion failed: elem == 1\nWith captures:\n elem = {}", __capture0)
|
||||
/// panic!("Assertion failed: elem == 1\nWith captures:\n elem = {:?}", __capture0)
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
|
|
|
@ -16,6 +16,7 @@ pub fn expand_deriving_copy(
|
|||
let trait_def = TraitDef {
|
||||
span,
|
||||
path: path_std!(marker::Copy),
|
||||
skip_path_as_bound: false,
|
||||
additional_bounds: Vec::new(),
|
||||
generics: Bounds::empty(),
|
||||
supports_unions: true,
|
||||
|
|
|
@ -72,6 +72,7 @@ pub fn expand_deriving_clone(
|
|||
let trait_def = TraitDef {
|
||||
span,
|
||||
path: path_std!(clone::Clone),
|
||||
skip_path_as_bound: false,
|
||||
additional_bounds: bounds,
|
||||
generics: Bounds::empty(),
|
||||
supports_unions: true,
|
||||
|
|
|
@ -25,6 +25,7 @@ pub fn expand_deriving_eq(
|
|||
let trait_def = TraitDef {
|
||||
span,
|
||||
path: path_std!(cmp::Eq),
|
||||
skip_path_as_bound: false,
|
||||
additional_bounds: Vec::new(),
|
||||
generics: Bounds::empty(),
|
||||
supports_unions: true,
|
||||
|
|
|
@ -19,6 +19,7 @@ pub fn expand_deriving_ord(
|
|||
let trait_def = TraitDef {
|
||||
span,
|
||||
path: path_std!(cmp::Ord),
|
||||
skip_path_as_bound: false,
|
||||
additional_bounds: Vec::new(),
|
||||
generics: Bounds::empty(),
|
||||
supports_unions: false,
|
||||
|
|
|
@ -83,6 +83,7 @@ pub fn expand_deriving_partial_eq(
|
|||
let trait_def = TraitDef {
|
||||
span,
|
||||
path: path_std!(cmp::PartialEq),
|
||||
skip_path_as_bound: false,
|
||||
additional_bounds: Vec::new(),
|
||||
generics: Bounds::empty(),
|
||||
supports_unions: false,
|
||||
|
|
|
@ -37,6 +37,7 @@ pub fn expand_deriving_partial_ord(
|
|||
let trait_def = TraitDef {
|
||||
span,
|
||||
path: path_std!(cmp::PartialOrd),
|
||||
skip_path_as_bound: false,
|
||||
additional_bounds: vec![],
|
||||
generics: Bounds::empty(),
|
||||
supports_unions: false,
|
||||
|
|
|
@ -20,6 +20,7 @@ pub fn expand_deriving_debug(
|
|||
let trait_def = TraitDef {
|
||||
span,
|
||||
path: path_std!(fmt::Debug),
|
||||
skip_path_as_bound: false,
|
||||
additional_bounds: Vec::new(),
|
||||
generics: Bounds::empty(),
|
||||
supports_unions: false,
|
||||
|
|
|
@ -23,6 +23,7 @@ pub fn expand_deriving_rustc_decodable(
|
|||
let trait_def = TraitDef {
|
||||
span,
|
||||
path: Path::new_(vec![krate, sym::Decodable], vec![], PathKind::Global),
|
||||
skip_path_as_bound: false,
|
||||
additional_bounds: Vec::new(),
|
||||
generics: Bounds::empty(),
|
||||
supports_unions: false,
|
||||
|
|
|
@ -24,6 +24,7 @@ pub fn expand_deriving_default(
|
|||
let trait_def = TraitDef {
|
||||
span,
|
||||
path: Path::new(vec![kw::Default, sym::Default]),
|
||||
skip_path_as_bound: has_a_default_variant(item),
|
||||
additional_bounds: Vec::new(),
|
||||
generics: Bounds::empty(),
|
||||
supports_unions: false,
|
||||
|
@ -262,3 +263,22 @@ impl<'a, 'b> rustc_ast::visit::Visitor<'a> for DetectNonVariantDefaultAttr<'a, '
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn has_a_default_variant(item: &Annotatable) -> bool {
|
||||
struct HasDefaultAttrOnVariant {
|
||||
found: bool,
|
||||
}
|
||||
|
||||
impl<'ast> rustc_ast::visit::Visitor<'ast> for HasDefaultAttrOnVariant {
|
||||
fn visit_variant(&mut self, v: &'ast rustc_ast::Variant) {
|
||||
if v.attrs.iter().any(|attr| attr.has_name(kw::Default)) {
|
||||
self.found = true;
|
||||
}
|
||||
// no need to subrecurse.
|
||||
}
|
||||
}
|
||||
|
||||
let mut visitor = HasDefaultAttrOnVariant { found: false };
|
||||
item.visit_with(&mut visitor);
|
||||
visitor.found
|
||||
}
|
||||
|
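
The `skip_path_as_bound: has_a_default_variant(item)` change above, together with the visitor just shown, means the derive does not add the derived trait as a bound on type parameters when an enum has a `#[default]` variant. A small runnable illustration of the resulting behavior (my example, not from the diff):

#[allow(dead_code)]
#[derive(Default)]
enum Maybe<T> {
    #[default]
    Nothing,
    Just(T),
}

#[allow(dead_code)]
struct NoDefault; // deliberately does not implement Default

fn main() {
    // Works without `T: Default`, because the generated impl is roughly
    // `impl<T> Default for Maybe<T> { fn default() -> Self { Self::Nothing } }`.
    let m: Maybe<NoDefault> = Maybe::default();
    assert!(matches!(m, Maybe::Nothing));
}
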
|
|
@ -107,6 +107,7 @@ pub fn expand_deriving_rustc_encodable(
|
|||
let trait_def = TraitDef {
|
||||
span,
|
||||
path: Path::new_(vec![krate, sym::Encodable], vec![], PathKind::Global),
|
||||
skip_path_as_bound: false,
|
||||
additional_bounds: Vec::new(),
|
||||
generics: Bounds::empty(),
|
||||
supports_unions: false,
|
||||
|
|
|
@ -174,6 +174,7 @@ use rustc_span::symbol::{kw, sym, Ident, Symbol};
|
|||
use rustc_span::Span;
|
||||
use std::cell::RefCell;
|
||||
use std::iter;
|
||||
use std::ops::Not;
|
||||
use std::vec;
|
||||
use thin_vec::thin_vec;
|
||||
use ty::{Bounds, Path, Ref, Self_, Ty};
|
||||
|
@ -187,6 +188,9 @@ pub struct TraitDef<'a> {
|
|||
/// Path of the trait, including any type parameters
|
||||
pub path: Path,
|
||||
|
||||
/// Whether to skip adding the current trait as a bound to the type parameters of the type.
|
||||
pub skip_path_as_bound: bool,
|
||||
|
||||
/// Additional bounds required of any type parameters of the type,
|
||||
/// other than the current trait
|
||||
pub additional_bounds: Vec<Ty>,
|
||||
|
@ -562,7 +566,7 @@ impl<'a> TraitDef<'a> {
|
|||
tokens: None,
|
||||
},
|
||||
attrs: ast::AttrVec::new(),
|
||||
kind: ast::AssocItemKind::TyAlias(Box::new(ast::TyAlias {
|
||||
kind: ast::AssocItemKind::Type(Box::new(ast::TyAlias {
|
||||
defaultness: ast::Defaultness::Final,
|
||||
generics: Generics::default(),
|
||||
where_clauses: (
|
||||
|
@ -596,7 +600,7 @@ impl<'a> TraitDef<'a> {
|
|||
cx.trait_bound(p.to_path(cx, self.span, type_ident, generics))
|
||||
}).chain(
|
||||
// require the current trait
|
||||
iter::once(cx.trait_bound(trait_path.clone()))
|
||||
self.skip_path_as_bound.not().then(|| cx.trait_bound(trait_path.clone()))
|
||||
).chain(
|
||||
// also add in any bounds from the declaration
|
||||
param.bounds.iter().cloned()
|
||||
|
@ -1110,6 +1114,11 @@ impl<'a> MethodDef<'a> {
|
|||
/// ```
|
||||
/// is equivalent to:
|
||||
/// ```
|
||||
/// #![feature(core_intrinsics)]
|
||||
/// enum A {
|
||||
/// A1,
|
||||
/// A2(i32)
|
||||
/// }
|
||||
/// impl ::core::cmp::PartialEq for A {
|
||||
/// #[inline]
|
||||
/// fn eq(&self, other: &A) -> bool {
|
||||
|
|
|
@ -22,6 +22,7 @@ pub fn expand_deriving_hash(
|
|||
let hash_trait_def = TraitDef {
|
||||
span,
|
||||
path,
|
||||
skip_path_as_bound: false,
|
||||
additional_bounds: Vec::new(),
|
||||
generics: Bounds::empty(),
|
||||
supports_unions: false,
|
||||
|
|
|
@ -131,6 +131,8 @@ fn inject_impl_of_structural_trait(
|
|||
// Create generics param list for where clauses and impl headers
|
||||
let mut generics = generics.clone();
|
||||
|
||||
let ctxt = span.ctxt();
|
||||
|
||||
// Create the type of `self`.
|
||||
//
|
||||
// in addition, remove defaults from generic params (impls cannot have them).
|
||||
|
@ -138,16 +140,18 @@ fn inject_impl_of_structural_trait(
|
|||
.params
|
||||
.iter_mut()
|
||||
.map(|param| match &mut param.kind {
|
||||
ast::GenericParamKind::Lifetime => {
|
||||
ast::GenericArg::Lifetime(cx.lifetime(span, param.ident))
|
||||
}
|
||||
ast::GenericParamKind::Lifetime => ast::GenericArg::Lifetime(
|
||||
cx.lifetime(param.ident.span.with_ctxt(ctxt), param.ident),
|
||||
),
|
||||
ast::GenericParamKind::Type { default } => {
|
||||
*default = None;
|
||||
ast::GenericArg::Type(cx.ty_ident(span, param.ident))
|
||||
ast::GenericArg::Type(cx.ty_ident(param.ident.span.with_ctxt(ctxt), param.ident))
|
||||
}
|
||||
ast::GenericParamKind::Const { ty: _, kw_span: _, default } => {
|
||||
*default = None;
|
||||
ast::GenericArg::Const(cx.const_ident(span, param.ident))
|
||||
ast::GenericArg::Const(
|
||||
cx.const_ident(param.ident.span.with_ctxt(ctxt), param.ident),
|
||||
)
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
@ -174,6 +178,8 @@ fn inject_impl_of_structural_trait(
|
|||
})
|
||||
.cloned(),
|
||||
);
|
||||
// Mark as `automatically_derived` to avoid some silly lints.
|
||||
attrs.push(cx.attribute(cx.meta_word(span, sym::automatically_derived)));
|
||||
|
||||
let newitem = cx.item(
|
||||
span,
|
||||
|
|
File diff suppressed because it is too large.
compiler/rustc_builtin_macros/src/format/ast.rs (new file, 240 lines)
|
@ -0,0 +1,240 @@
|
|||
use rustc_ast::ptr::P;
|
||||
use rustc_ast::Expr;
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_span::symbol::{Ident, Symbol};
|
||||
use rustc_span::Span;
|
||||
|
||||
// Definitions:
|
||||
//
|
||||
// format_args!("hello {abc:.xyz$}!!", abc="world");
|
||||
// └──────────────────────────────────────────────┘
|
||||
// FormatArgs
|
||||
//
|
||||
// format_args!("hello {abc:.xyz$}!!", abc="world");
|
||||
// └─────────┘
|
||||
// argument
|
||||
//
|
||||
// format_args!("hello {abc:.xyz$}!!", abc="world");
|
||||
// └───────────────────┘
|
||||
// template
|
||||
//
|
||||
// format_args!("hello {abc:.xyz$}!!", abc="world");
|
||||
// └────┘└─────────┘└┘
|
||||
// pieces
|
||||
//
|
||||
// format_args!("hello {abc:.xyz$}!!", abc="world");
|
||||
// └────┘ └┘
|
||||
// literal pieces
|
||||
//
|
||||
// format_args!("hello {abc:.xyz$}!!", abc="world");
|
||||
// └─────────┘
|
||||
// placeholder
|
||||
//
|
||||
// format_args!("hello {abc:.xyz$}!!", abc="world");
|
||||
// └─┘ └─┘
|
||||
// positions (could be names, numbers, empty, or `*`)
|
||||
|
||||
/// (Parsed) format args.
|
||||
///
|
||||
/// Basically the "AST" for a complete `format_args!()`.
|
||||
///
|
||||
/// E.g., `format_args!("hello {name}");`.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct FormatArgs {
|
||||
pub span: Span,
|
||||
pub template: Vec<FormatArgsPiece>,
|
||||
pub arguments: FormatArguments,
|
||||
}
|
||||
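
To connect the struct above to the surface syntax it models, here is a small runnable example together with the decomposition it would get; the mapping in the comment is illustrative, not quoted from the diff.

fn main() {
    // In terms of the AST above, this call's `template` is roughly
    // [Literal("hello "), Placeholder(<name, Display>), Literal("!")], and its
    // `arguments` hold one entry of kind `Captured(name)` for the implicit capture.
    let name = "world";
    assert_eq!(format!("hello {name}!"), "hello world!");
}
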
|
||||
/// A piece of a format template string.
|
||||
///
|
||||
/// E.g. "hello" or "{name}".
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum FormatArgsPiece {
|
||||
Literal(Symbol),
|
||||
Placeholder(FormatPlaceholder),
|
||||
}
|
||||
|
||||
/// The arguments to format_args!().
|
||||
///
|
||||
/// E.g. `1, 2, name="ferris", n=3`,
|
||||
/// but also implicit captured arguments like `x` in `format_args!("{x}")`.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct FormatArguments {
|
||||
arguments: Vec<FormatArgument>,
|
||||
num_unnamed_args: usize,
|
||||
num_explicit_args: usize,
|
||||
names: FxHashMap<Symbol, usize>,
|
||||
}
|
||||
|
||||
impl FormatArguments {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
arguments: Vec::new(),
|
||||
names: FxHashMap::default(),
|
||||
num_unnamed_args: 0,
|
||||
num_explicit_args: 0,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add(&mut self, arg: FormatArgument) -> usize {
|
||||
let index = self.arguments.len();
|
||||
if let Some(name) = arg.kind.ident() {
|
||||
self.names.insert(name.name, index);
|
||||
} else if self.names.is_empty() {
|
||||
// Only count the unnamed args before the first named arg.
|
||||
// (Any later ones are errors.)
|
||||
self.num_unnamed_args += 1;
|
||||
}
|
||||
if !matches!(arg.kind, FormatArgumentKind::Captured(..)) {
|
||||
// This is an explicit argument.
|
||||
// Make sure that all arguments so far are explicit.
|
||||
assert_eq!(
|
||||
self.num_explicit_args,
|
||||
self.arguments.len(),
|
||||
"captured arguments must be added last"
|
||||
);
|
||||
self.num_explicit_args += 1;
|
||||
}
|
||||
self.arguments.push(arg);
|
||||
index
|
||||
}
|
||||
|
||||
pub fn by_name(&self, name: Symbol) -> Option<(usize, &FormatArgument)> {
|
||||
let i = *self.names.get(&name)?;
|
||||
Some((i, &self.arguments[i]))
|
||||
}
|
||||
|
||||
pub fn by_index(&self, i: usize) -> Option<&FormatArgument> {
|
||||
(i < self.num_explicit_args).then(|| &self.arguments[i])
|
||||
}
|
||||
|
||||
pub fn unnamed_args(&self) -> &[FormatArgument] {
|
||||
&self.arguments[..self.num_unnamed_args]
|
||||
}
|
||||
|
||||
pub fn named_args(&self) -> &[FormatArgument] {
|
||||
&self.arguments[self.num_unnamed_args..self.num_explicit_args]
|
||||
}
|
||||
|
||||
pub fn explicit_args(&self) -> &[FormatArgument] {
|
||||
&self.arguments[..self.num_explicit_args]
|
||||
}
|
||||
|
||||
pub fn into_vec(self) -> Vec<FormatArgument> {
|
||||
self.arguments
|
||||
}
|
||||
}
|
||||
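
A standalone model (plain Rust, not rustc-internal code) of the indexing rules that `add` and the accessors above implement: unnamed arguments are only counted before the first named one, and implicitly captured arguments must come after all explicit ones.

#[allow(dead_code)]
#[derive(Clone, Copy)]
enum Kind { Normal, Named(&'static str), Captured(&'static str) }

struct Args { kinds: Vec<Kind>, seen_named: bool, num_unnamed: usize, num_explicit: usize }

impl Args {
    fn new() -> Self {
        Self { kinds: Vec::new(), seen_named: false, num_unnamed: 0, num_explicit: 0 }
    }
    fn add(&mut self, kind: Kind) -> usize {
        let index = self.kinds.len();
        match kind {
            Kind::Named(_) | Kind::Captured(_) => self.seen_named = true,
            // Like the real `add`: only count unnamed args seen before any named one.
            Kind::Normal if !self.seen_named => self.num_unnamed += 1,
            Kind::Normal => {}
        }
        if !matches!(kind, Kind::Captured(_)) {
            assert_eq!(self.num_explicit, index, "captured arguments must be added last");
            self.num_explicit += 1;
        }
        self.kinds.push(kind);
        index
    }
}

fn main() {
    // Models `format_args!("{} {n} {x}", 1, n = 2)`, where `x` is implicitly captured.
    let mut args = Args::new();
    assert_eq!(args.add(Kind::Normal), 0);
    assert_eq!(args.add(Kind::Named("n")), 1);
    assert_eq!(args.add(Kind::Captured("x")), 2);
    assert_eq!((args.num_unnamed, args.num_explicit), (1, 2));
}
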
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct FormatArgument {
|
||||
pub kind: FormatArgumentKind,
|
||||
pub expr: P<Expr>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum FormatArgumentKind {
|
||||
/// `format_args(…, arg)`
|
||||
Normal,
|
||||
/// `format_args(…, arg = 1)`
|
||||
Named(Ident),
|
||||
/// `format_args("… {arg} …")`
|
||||
Captured(Ident),
|
||||
}
|
||||
|
||||
impl FormatArgumentKind {
|
||||
pub fn ident(&self) -> Option<Ident> {
|
||||
match self {
|
||||
&Self::Normal => None,
|
||||
&Self::Named(id) => Some(id),
|
||||
&Self::Captured(id) => Some(id),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct FormatPlaceholder {
|
||||
/// Index into [`FormatArgs::arguments`].
|
||||
pub argument: FormatArgPosition,
|
||||
/// The span inside the format string for the full `{…}` placeholder.
|
||||
pub span: Option<Span>,
|
||||
/// `{}`, `{:?}`, or `{:x}`, etc.
|
||||
pub format_trait: FormatTrait,
|
||||
/// `{}` or `{:.5}` or `{:-^20}`, etc.
|
||||
pub format_options: FormatOptions,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct FormatArgPosition {
|
||||
/// Which argument this position refers to (Ok),
|
||||
/// or would've referred to if it existed (Err).
|
||||
pub index: Result<usize, usize>,
|
||||
/// What kind of position this is. See [`FormatArgPositionKind`].
|
||||
pub kind: FormatArgPositionKind,
|
||||
/// The span of the name or number.
|
||||
pub span: Option<Span>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
pub enum FormatArgPositionKind {
|
||||
/// `{}` or `{:.*}`
|
||||
Implicit,
|
||||
/// `{1}` or `{:1$}` or `{:.1$}`
|
||||
Number,
|
||||
/// `{a}` or `{:a$}` or `{:.a$}`
|
||||
Named,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
|
||||
pub enum FormatTrait {
|
||||
/// `{}`
|
||||
Display,
|
||||
/// `{:?}`
|
||||
Debug,
|
||||
/// `{:e}`
|
||||
LowerExp,
|
||||
/// `{:E}`
|
||||
UpperExp,
|
||||
/// `{:o}`
|
||||
Octal,
|
||||
/// `{:p}`
|
||||
Pointer,
|
||||
/// `{:b}`
|
||||
Binary,
|
||||
/// `{:x}`
|
||||
LowerHex,
|
||||
/// `{:X}`
|
||||
UpperHex,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, PartialEq, Eq)]
|
||||
pub struct FormatOptions {
|
||||
/// The width. E.g. `{:5}` or `{:width$}`.
|
||||
pub width: Option<FormatCount>,
|
||||
/// The precision. E.g. `{:.5}` or `{:.precision$}`.
|
||||
pub precision: Option<FormatCount>,
|
||||
/// The alignment. E.g. `{:>}` or `{:<}` or `{:^}`.
|
||||
pub alignment: Option<FormatAlignment>,
|
||||
/// The fill character. E.g. the `.` in `{:.>10}`.
|
||||
pub fill: Option<char>,
|
||||
/// The `+`, `-`, `0`, `#`, `x?` and `X?` flags.
|
||||
pub flags: u32,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum FormatAlignment {
|
||||
/// `{:<}`
|
||||
Left,
|
||||
/// `{:>}`
|
||||
Right,
|
||||
/// `{:^}`
|
||||
Center,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum FormatCount {
|
||||
/// `{:5}` or `{:.5}`
|
||||
Literal(usize),
|
||||
/// `{:.*}`, `{:.5$}`, or `{:a$}`, etc.
|
||||
Argument(FormatArgPosition),
|
||||
}
|
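
A runnable illustration (mine, not from the diff) of the options these fields model: in `{x:>8.3}`, the `>` is `FormatAlignment::Right`, the `8` a width of `FormatCount::Literal(8)`, and the `.3` a precision of `FormatCount::Literal(3)`.

fn main() {
    let x = 3.14159f64;
    // Width 8, precision 3, right-aligned with the default space fill.
    assert_eq!(format!("{x:>8.3}"), "   3.142");
}
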
compiler/rustc_builtin_macros/src/format/expand.rs (new file, 353 lines)
|
@ -0,0 +1,353 @@
|
|||
use super::*;
|
||||
use rustc_ast as ast;
|
||||
use rustc_ast::visit::{self, Visitor};
|
||||
use rustc_ast::{BlockCheckMode, UnsafeSource};
|
||||
use rustc_data_structures::fx::FxIndexSet;
|
||||
use rustc_span::{sym, symbol::kw};
|
||||
|
||||
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
|
||||
enum ArgumentType {
|
||||
Format(FormatTrait),
|
||||
Usize,
|
||||
}
|
||||
|
||||
fn make_argument(ecx: &ExtCtxt<'_>, sp: Span, arg: P<ast::Expr>, ty: ArgumentType) -> P<ast::Expr> {
|
||||
// Generate:
|
||||
// ::core::fmt::ArgumentV1::new_…(arg)
|
||||
use ArgumentType::*;
|
||||
use FormatTrait::*;
|
||||
ecx.expr_call_global(
|
||||
sp,
|
||||
ecx.std_path(&[
|
||||
sym::fmt,
|
||||
sym::ArgumentV1,
|
||||
match ty {
|
||||
Format(Display) => sym::new_display,
|
||||
Format(Debug) => sym::new_debug,
|
||||
Format(LowerExp) => sym::new_lower_exp,
|
||||
Format(UpperExp) => sym::new_upper_exp,
|
||||
Format(Octal) => sym::new_octal,
|
||||
Format(Pointer) => sym::new_pointer,
|
||||
Format(Binary) => sym::new_binary,
|
||||
Format(LowerHex) => sym::new_lower_hex,
|
||||
Format(UpperHex) => sym::new_upper_hex,
|
||||
Usize => sym::from_usize,
|
||||
},
|
||||
]),
|
||||
vec![arg],
|
||||
)
|
||||
}
|
||||
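
The `ArgumentV1` constructors selected above are perma-unstable internals, so the following is only a surface-level, runnable illustration (not from the diff) of the observable effect: each placeholder picks a formatting trait for its argument.

use std::fmt::Write;

fn main() {
    let mut out = String::new();
    // `{}` selects Display (`new_display` above); `{:x}` selects LowerHex (`new_lower_hex`).
    out.write_fmt(format_args!("{} {:x}", 7, 255)).unwrap();
    assert_eq!(out, "7 ff");
}
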
|
||||
fn make_count(
|
||||
ecx: &ExtCtxt<'_>,
|
||||
sp: Span,
|
||||
count: &Option<FormatCount>,
|
||||
argmap: &mut FxIndexSet<(usize, ArgumentType)>,
|
||||
) -> P<ast::Expr> {
|
||||
// Generate:
|
||||
// ::core::fmt::rt::v1::Count::…(…)
|
||||
match count {
|
||||
Some(FormatCount::Literal(n)) => ecx.expr_call_global(
|
||||
sp,
|
||||
ecx.std_path(&[sym::fmt, sym::rt, sym::v1, sym::Count, sym::Is]),
|
||||
vec![ecx.expr_usize(sp, *n)],
|
||||
),
|
||||
Some(FormatCount::Argument(arg)) => {
|
||||
if let Ok(arg_index) = arg.index {
|
||||
let (i, _) = argmap.insert_full((arg_index, ArgumentType::Usize));
|
||||
ecx.expr_call_global(
|
||||
sp,
|
||||
ecx.std_path(&[sym::fmt, sym::rt, sym::v1, sym::Count, sym::Param]),
|
||||
vec![ecx.expr_usize(sp, i)],
|
||||
)
|
||||
} else {
|
||||
DummyResult::raw_expr(sp, true)
|
||||
}
|
||||
}
|
||||
None => ecx.expr_path(ecx.path_global(
|
||||
sp,
|
||||
ecx.std_path(&[sym::fmt, sym::rt, sym::v1, sym::Count, sym::Implied]),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
fn make_format_spec(
|
||||
ecx: &ExtCtxt<'_>,
|
||||
sp: Span,
|
||||
placeholder: &FormatPlaceholder,
|
||||
argmap: &mut FxIndexSet<(usize, ArgumentType)>,
|
||||
) -> P<ast::Expr> {
|
||||
// Generate:
|
||||
// ::core::fmt::rt::v1::Argument {
|
||||
// position: 0usize,
|
||||
// format: ::core::fmt::rt::v1::FormatSpec {
|
||||
// fill: ' ',
|
||||
// align: ::core::fmt::rt::v1::Alignment::Unknown,
|
||||
// flags: 0u32,
|
||||
// precision: ::core::fmt::rt::v1::Count::Implied,
|
||||
// width: ::core::fmt::rt::v1::Count::Implied,
|
||||
// },
|
||||
// }
|
||||
let position = match placeholder.argument.index {
|
||||
Ok(arg_index) => {
|
||||
let (i, _) =
|
||||
argmap.insert_full((arg_index, ArgumentType::Format(placeholder.format_trait)));
|
||||
ecx.expr_usize(sp, i)
|
||||
}
|
||||
Err(_) => DummyResult::raw_expr(sp, true),
|
||||
};
|
||||
let fill = ecx.expr_char(sp, placeholder.format_options.fill.unwrap_or(' '));
|
||||
let align = ecx.expr_path(ecx.path_global(
|
||||
sp,
|
||||
ecx.std_path(&[
|
||||
sym::fmt,
|
||||
sym::rt,
|
||||
sym::v1,
|
||||
sym::Alignment,
|
||||
match placeholder.format_options.alignment {
|
||||
Some(FormatAlignment::Left) => sym::Left,
|
||||
Some(FormatAlignment::Right) => sym::Right,
|
||||
Some(FormatAlignment::Center) => sym::Center,
|
||||
None => sym::Unknown,
|
||||
},
|
||||
]),
|
||||
));
|
||||
let flags = ecx.expr_u32(sp, placeholder.format_options.flags);
|
||||
let prec = make_count(ecx, sp, &placeholder.format_options.precision, argmap);
|
||||
let width = make_count(ecx, sp, &placeholder.format_options.width, argmap);
|
||||
ecx.expr_struct(
|
||||
sp,
|
||||
ecx.path_global(sp, ecx.std_path(&[sym::fmt, sym::rt, sym::v1, sym::Argument])),
|
||||
vec![
|
||||
ecx.field_imm(sp, Ident::new(sym::position, sp), position),
|
||||
ecx.field_imm(
|
||||
sp,
|
||||
Ident::new(sym::format, sp),
|
||||
ecx.expr_struct(
|
||||
sp,
|
||||
ecx.path_global(
|
||||
sp,
|
||||
ecx.std_path(&[sym::fmt, sym::rt, sym::v1, sym::FormatSpec]),
|
||||
),
|
||||
vec![
|
||||
ecx.field_imm(sp, Ident::new(sym::fill, sp), fill),
|
||||
ecx.field_imm(sp, Ident::new(sym::align, sp), align),
|
||||
ecx.field_imm(sp, Ident::new(sym::flags, sp), flags),
|
||||
ecx.field_imm(sp, Ident::new(sym::precision, sp), prec),
|
||||
ecx.field_imm(sp, Ident::new(sym::width, sp), width),
|
||||
],
|
||||
),
|
||||
),
|
||||
],
|
||||
)
|
||||
}
|
||||
|
||||
pub fn expand_parsed_format_args(ecx: &mut ExtCtxt<'_>, fmt: FormatArgs) -> P<ast::Expr> {
|
||||
let macsp = ecx.with_def_site_ctxt(ecx.call_site());
|
||||
|
||||
let lit_pieces = ecx.expr_array_ref(
|
||||
fmt.span,
|
||||
fmt.template
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter_map(|(i, piece)| match piece {
|
||||
&FormatArgsPiece::Literal(s) => Some(ecx.expr_str(fmt.span, s)),
|
||||
&FormatArgsPiece::Placeholder(_) => {
|
||||
// Inject empty string before placeholders when not already preceded by a literal piece.
|
||||
if i == 0 || matches!(fmt.template[i - 1], FormatArgsPiece::Placeholder(_)) {
|
||||
Some(ecx.expr_str(fmt.span, kw::Empty))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
);
|
||||
|
||||
// Whether we'll use the `Arguments::new_v1_formatted` form (true),
|
||||
// or the `Arguments::new_v1` form (false).
|
||||
let mut use_format_options = false;
|
||||
|
||||
// Create a list of all _unique_ (argument, format trait) combinations.
|
||||
// E.g. "{0} {0:x} {0} {1}" -> [(0, Display), (0, LowerHex), (1, Display)]
|
||||
let mut argmap = FxIndexSet::default();
|
||||
for piece in &fmt.template {
|
||||
let FormatArgsPiece::Placeholder(placeholder) = piece else { continue };
|
||||
if placeholder.format_options != Default::default() {
|
||||
// Can't use basic form if there's any formatting options.
|
||||
use_format_options = true;
|
||||
}
|
||||
if let Ok(index) = placeholder.argument.index {
|
||||
if !argmap.insert((index, ArgumentType::Format(placeholder.format_trait))) {
|
||||
// Duplicate (argument, format trait) combination,
|
||||
// which we'll only put once in the args array.
|
||||
use_format_options = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let format_options = use_format_options.then(|| {
|
||||
// Generate:
|
||||
// &[format_spec_0, format_spec_1, format_spec_2]
|
||||
ecx.expr_array_ref(
|
||||
macsp,
|
||||
fmt.template
|
||||
.iter()
|
||||
.filter_map(|piece| {
|
||||
let FormatArgsPiece::Placeholder(placeholder) = piece else { return None };
|
||||
Some(make_format_spec(ecx, macsp, placeholder, &mut argmap))
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
});
|
||||
|
||||
let arguments = fmt.arguments.into_vec();
|
||||
|
||||
// If the args array contains exactly all the original arguments once,
|
||||
// in order, we can use a simple array instead of a `match` construction.
|
||||
// However, if there's a yield point in any argument except the first one,
|
||||
// we don't do this, because an ArgumentV1 cannot be kept across yield points.
|
||||
let use_simple_array = argmap.len() == arguments.len()
|
||||
&& argmap.iter().enumerate().all(|(i, &(j, _))| i == j)
|
||||
&& arguments.iter().skip(1).all(|arg| !may_contain_yield_point(&arg.expr));
|
||||
|
||||
let args = if use_simple_array {
|
||||
// Generate:
|
||||
// &[
|
||||
// ::core::fmt::ArgumentV1::new_display(&arg0),
|
||||
// ::core::fmt::ArgumentV1::new_lower_hex(&arg1),
|
||||
// ::core::fmt::ArgumentV1::new_debug(&arg2),
|
||||
// ]
|
||||
ecx.expr_array_ref(
|
||||
macsp,
|
||||
arguments
|
||||
.into_iter()
|
||||
.zip(argmap)
|
||||
.map(|(arg, (_, ty))| {
|
||||
let sp = arg.expr.span.with_ctxt(macsp.ctxt());
|
||||
make_argument(ecx, sp, ecx.expr_addr_of(sp, arg.expr), ty)
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
} else {
|
||||
// Generate:
|
||||
// match (&arg0, &arg1, &arg2) {
|
||||
// args => &[
|
||||
// ::core::fmt::ArgumentV1::new_display(args.0),
|
||||
// ::core::fmt::ArgumentV1::new_lower_hex(args.1),
|
||||
// ::core::fmt::ArgumentV1::new_debug(args.0),
|
||||
// ]
|
||||
// }
|
||||
let args_ident = Ident::new(sym::args, macsp);
|
||||
let args = argmap
|
||||
.iter()
|
||||
.map(|&(arg_index, ty)| {
|
||||
if let Some(arg) = arguments.get(arg_index) {
|
||||
let sp = arg.expr.span.with_ctxt(macsp.ctxt());
|
||||
make_argument(
|
||||
ecx,
|
||||
sp,
|
||||
ecx.expr_field(
|
||||
sp,
|
||||
ecx.expr_ident(macsp, args_ident),
|
||||
Ident::new(sym::integer(arg_index), macsp),
|
||||
),
|
||||
ty,
|
||||
)
|
||||
} else {
|
||||
DummyResult::raw_expr(macsp, true)
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
ecx.expr_addr_of(
|
||||
macsp,
|
||||
ecx.expr_match(
|
||||
macsp,
|
||||
ecx.expr_tuple(
|
||||
macsp,
|
||||
arguments
|
||||
.into_iter()
|
||||
.map(|arg| {
|
||||
ecx.expr_addr_of(arg.expr.span.with_ctxt(macsp.ctxt()), arg.expr)
|
||||
})
|
||||
.collect(),
|
||||
),
|
||||
vec![ecx.arm(macsp, ecx.pat_ident(macsp, args_ident), ecx.expr_array(macsp, args))],
|
||||
),
|
||||
)
|
||||
};
|
||||
|
||||
if let Some(format_options) = format_options {
|
||||
// Generate:
|
||||
// ::core::fmt::Arguments::new_v1_formatted(
|
||||
// lit_pieces,
|
||||
// args,
|
||||
// format_options,
|
||||
// unsafe { ::core::fmt::UnsafeArg::new() }
|
||||
// )
|
||||
ecx.expr_call_global(
|
||||
macsp,
|
||||
ecx.std_path(&[sym::fmt, sym::Arguments, sym::new_v1_formatted]),
|
||||
vec![
|
||||
lit_pieces,
|
||||
args,
|
||||
format_options,
|
||||
ecx.expr_block(P(ast::Block {
|
||||
stmts: vec![ecx.stmt_expr(ecx.expr_call_global(
|
||||
macsp,
|
||||
ecx.std_path(&[sym::fmt, sym::UnsafeArg, sym::new]),
|
||||
Vec::new(),
|
||||
))],
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
rules: BlockCheckMode::Unsafe(UnsafeSource::CompilerGenerated),
|
||||
span: macsp,
|
||||
tokens: None,
|
||||
could_be_bare_literal: false,
|
||||
})),
|
||||
],
|
||||
)
|
||||
} else {
|
||||
// Generate:
|
||||
// ::core::fmt::Arguments::new_v1(
|
||||
// lit_pieces,
|
||||
// args,
|
||||
// )
|
||||
ecx.expr_call_global(
|
||||
macsp,
|
||||
ecx.std_path(&[sym::fmt, sym::Arguments, sym::new_v1]),
|
||||
vec![lit_pieces, args],
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn may_contain_yield_point(e: &ast::Expr) -> bool {
|
||||
struct MayContainYieldPoint(bool);
|
||||
|
||||
impl Visitor<'_> for MayContainYieldPoint {
|
||||
fn visit_expr(&mut self, e: &ast::Expr) {
|
||||
if let ast::ExprKind::Await(_) | ast::ExprKind::Yield(_) = e.kind {
|
||||
self.0 = true;
|
||||
} else {
|
||||
visit::walk_expr(self, e);
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_mac_call(&mut self, _: &ast::MacCall) {
|
||||
self.0 = true;
|
||||
}
|
||||
|
||||
fn visit_attribute(&mut self, _: &ast::Attribute) {
|
||||
// Conservatively assume this may be a proc macro attribute in
|
||||
// expression position.
|
||||
self.0 = true;
|
||||
}
|
||||
|
||||
fn visit_item(&mut self, _: &ast::Item) {
|
||||
// Do not recurse into nested items.
|
||||
}
|
||||
}
|
||||
|
||||
let mut visitor = MayContainYieldPoint(false);
|
||||
visitor.visit_expr(e);
|
||||
visitor.0
|
||||
}
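For orientation, here is a rough sketch of the expression the code above builds for a call such as `format_args!("{} {:x}!", a, b)`. This is an assumed example, not part of the diff: `a` and `b` are hypothetical arguments, and `Arguments::new_v1`/`ArgumentV1` are unstable `core::fmt` internals, so this shows the shape of the generated AST rather than compilable user code.

// Illustrative expansion shape (assumed example): each argument is used
// exactly once, in order, and there are no format options, so the
// simple-array `new_v1` path above is taken.
::core::fmt::Arguments::new_v1(
    // lit_pieces: an empty string is injected before the leading placeholder,
    // then the literal pieces " " and "!" follow.
    &["", " ", "!"],
    // args: one ArgumentV1 per unique (argument index, format trait) pair.
    &[
        ::core::fmt::ArgumentV1::new_display(&a),
        ::core::fmt::ArgumentV1::new_lower_hex(&b),
    ],
)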
@ -7,9 +7,9 @@
#![feature(box_patterns)]
#![feature(decl_macro)]
#![feature(if_let_guard)]
#![feature(is_some_and)]
#![feature(is_sorted)]
#![feature(let_chains)]
#![cfg_attr(bootstrap, feature(let_else))]
#![feature(proc_macro_internals)]
#![feature(proc_macro_quote)]
#![recursion_limit = "256"]
@ -115,7 +115,7 @@ pub fn expand_test_or_bench(
// reworked in the future to not need it, it'd be nice.
_ => diag.struct_span_err(attr_sp, msg).forget_guarantee(),
};
err.span_label(attr_sp, "the `#[test]` macro causes a a function to be run on a test and has no effect on non-functions")
err.span_label(attr_sp, "the `#[test]` macro causes a function to be run on a test and has no effect on non-functions")
.span_label(item.span, format!("expected a non-associated function, found {} {}", item.kind.article(), item.kind.descr()))
.span_suggestion(attr_sp, "replace with conditional compilation to make the item only exist when tests are being run", "#[cfg(test)]", Applicability::MaybeIncorrect)
.emit();
@ -65,7 +65,7 @@ pub(crate) fn get_file_name(crate_name: &str, crate_type: &str) -> String {
}

/// Similar to `get_file_name`, but converts any dashes (`-`) in the `crate_name` to
/// underscores (`_`). This is specially made for the the rustc and cargo wrappers
/// underscores (`_`). This is specially made for the rustc and cargo wrappers
/// which have a dash in the name, and that is not allowed in a crate name.
pub(crate) fn get_wrapper_file_name(crate_name: &str, crate_type: &str) -> String {
let crate_name = crate_name.replace('-', "_");
@ -1,4 +1,4 @@
#![feature(core_intrinsics, generators, generator_trait, is_sorted, bench_black_box)]
#![feature(core_intrinsics, generators, generator_trait, is_sorted)]

#[cfg(target_arch = "x86_64")]
use std::arch::x86_64::*;
@ -193,7 +193,7 @@ pub(super) fn from_casted_value<'tcx>(
kind: StackSlotKind::ExplicitSlot,
// FIXME Don't force the size to a multiple of 16 bytes once Cranelift gets a way to
// specify stack slot alignment.
// Stack slot size may be bigger for for example `[u8; 3]` which is packed into an `i32`.
// Stack slot size may be bigger for example `[u8; 3]` which is packed into an `i32`.
// It may also be smaller for example when the type is a wrapper around an integer with a
// larger alignment than the integer.
size: (std::cmp::max(abi_param_size, layout_size) + 15) / 16 * 16,
@ -633,7 +633,12 @@ fn codegen_stmt<'tcx>(
lval.write_cvalue(fx, operand.cast_pointer_to(to_layout));
}
Rvalue::Cast(
CastKind::Misc
CastKind::IntToInt
| CastKind::FloatToFloat
| CastKind::FloatToInt
| CastKind::IntToFloat
| CastKind::FnPtrToPtr
| CastKind::PtrToPtr
| CastKind::PointerExposeAddress
| CastKind::PointerFromExposedAddress,
ref operand,
@ -5,7 +5,6 @@ use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
|
|||
use rustc_middle::mir::interpret::{
|
||||
read_target_uint, AllocId, ConstAllocation, ConstValue, ErrorHandled, GlobalAlloc, Scalar,
|
||||
};
|
||||
use rustc_middle::ty::ConstKind;
|
||||
use rustc_span::DUMMY_SP;
|
||||
|
||||
use cranelift_codegen::ir::GlobalValueData;
|
||||
|
@ -42,15 +41,7 @@ pub(crate) fn check_constants(fx: &mut FunctionCx<'_, '_, '_>) -> bool {
|
|||
let mut all_constants_ok = true;
|
||||
for constant in &fx.mir.required_consts {
|
||||
let unevaluated = match fx.monomorphize(constant.literal) {
|
||||
ConstantKind::Ty(ct) => match ct.kind() {
|
||||
ConstKind::Unevaluated(uv) => uv.expand(),
|
||||
ConstKind::Value(_) => continue,
|
||||
ConstKind::Param(_)
|
||||
| ConstKind::Infer(_)
|
||||
| ConstKind::Bound(_, _)
|
||||
| ConstKind::Placeholder(_)
|
||||
| ConstKind::Error(_) => unreachable!("{:?}", ct),
|
||||
},
|
||||
ConstantKind::Ty(_) => unreachable!(),
|
||||
ConstantKind::Unevaluated(uv, _) => uv,
|
||||
ConstantKind::Val(..) => continue,
|
||||
};
|
||||
|
@ -118,7 +109,7 @@ pub(crate) fn codegen_constant<'tcx>(
|
|||
) -> CValue<'tcx> {
|
||||
let (const_val, ty) = match fx.monomorphize(constant.literal) {
|
||||
ConstantKind::Ty(const_) => unreachable!("{:?}", const_),
|
||||
ConstantKind::Unevaluated(ty::Unevaluated { def, substs, promoted }, ty)
|
||||
ConstantKind::Unevaluated(mir::UnevaluatedConst { def, substs, promoted }, ty)
|
||||
if fx.tcx.is_static(def.did) =>
|
||||
{
|
||||
assert!(substs.is_empty());
|
||||
|
@ -499,7 +490,16 @@ pub(crate) fn mir_operand_get_const_val<'tcx>(
|
|||
match &stmt.kind {
|
||||
StatementKind::Assign(local_and_rvalue) if &local_and_rvalue.0 == place => {
|
||||
match &local_and_rvalue.1 {
|
||||
Rvalue::Cast(CastKind::Misc, operand, ty) => {
|
||||
Rvalue::Cast(
|
||||
CastKind::IntToInt
|
||||
| CastKind::FloatToFloat
|
||||
| CastKind::FloatToInt
|
||||
| CastKind::IntToFloat
|
||||
| CastKind::FnPtrToPtr
|
||||
| CastKind::PtrToPtr,
|
||||
operand,
|
||||
ty,
|
||||
) => {
|
||||
if computed_const_val.is_some() {
|
||||
return None; // local assigned twice
|
||||
}
|
||||
|
|
|
@ -11,10 +11,6 @@ use crate::intrinsic::ArgAbiExt;
|
|||
use crate::type_of::LayoutGccExt;
|
||||
|
||||
impl<'a, 'gcc, 'tcx> AbiBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
fn apply_attrs_callsite(&mut self, _fn_abi: &FnAbi<'tcx, Ty<'tcx>>, _callsite: Self::Value) {
|
||||
// TODO(antoyo)
|
||||
}
|
||||
|
||||
fn get_param(&mut self, index: usize) -> Self::Value {
|
||||
let func = self.current_func();
|
||||
let param = func.get_param(index as i32);
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
use std::fs::File;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use crate::errors::RanlibFailure;
|
||||
|
||||
use rustc_codegen_ssa::back::archive::{ArchiveBuilder, ArchiveBuilderBuilder};
|
||||
use rustc_session::Session;
|
||||
|
||||
|
@ -181,7 +183,7 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
|
|||
std::process::Command::new("ranlib").arg(output).status().expect("Couldn't run ranlib");
|
||||
|
||||
if !status.success() {
|
||||
self.config.sess.fatal(&format!("Ranlib exited with code {:?}", status.code()));
|
||||
self.config.sess.emit_fatal(RanlibFailure::new(status.code()));
|
||||
}
|
||||
|
||||
any_members
|
||||
|
|
|
@ -12,6 +12,7 @@ use std::borrow::Cow;
|
|||
|
||||
use crate::builder::Builder;
|
||||
use crate::context::CodegenCx;
|
||||
use crate::errors::UnwindingInlineAsm;
|
||||
use crate::type_of::LayoutGccExt;
|
||||
use crate::callee::get_fn;
|
||||
|
||||
|
@ -109,7 +110,7 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
|||
fn codegen_inline_asm(&mut self, template: &[InlineAsmTemplatePiece], rust_operands: &[InlineAsmOperandRef<'tcx, Self>], options: InlineAsmOptions, span: &[Span], _instance: Instance<'_>, _dest_catch_funclet: Option<(Self::BasicBlock, Self::BasicBlock, Option<&Self::Funclet>)>) {
|
||||
if options.contains(InlineAsmOptions::MAY_UNWIND) {
|
||||
self.sess()
|
||||
.struct_span_err(span[0], "GCC backend does not support unwinding from inline asm")
|
||||
.create_err(UnwindingInlineAsm { span: span[0] })
|
||||
.emit();
|
||||
return;
|
||||
}
|
||||
|
@ -497,7 +498,7 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
|||
if options.contains(InlineAsmOptions::NORETURN) {
|
||||
let builtin_unreachable = self.context.get_builtin_function("__builtin_unreachable");
|
||||
let builtin_unreachable: RValue<'gcc> = unsafe { std::mem::transmute(builtin_unreachable) };
|
||||
self.call(self.type_void(), builtin_unreachable, &[], None);
|
||||
self.call(self.type_void(), None, builtin_unreachable, &[], None);
|
||||
}
|
||||
|
||||
// Write results to outputs.
|
||||
|
|
|
@ -444,11 +444,23 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
|
|||
self.block.end_with_switch(None, value, default_block, &gcc_cases);
|
||||
}
|
||||
|
||||
fn invoke(&mut self, typ: Type<'gcc>, func: RValue<'gcc>, args: &[RValue<'gcc>], then: Block<'gcc>, catch: Block<'gcc>, _funclet: Option<&Funclet>) -> RValue<'gcc> {
|
||||
fn invoke(
|
||||
&mut self,
|
||||
typ: Type<'gcc>,
|
||||
fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
|
||||
func: RValue<'gcc>,
|
||||
args: &[RValue<'gcc>],
|
||||
then: Block<'gcc>,
|
||||
catch: Block<'gcc>,
|
||||
_funclet: Option<&Funclet>,
|
||||
) -> RValue<'gcc> {
|
||||
// TODO(bjorn3): Properly implement unwinding.
|
||||
let call_site = self.call(typ, func, args, None);
|
||||
let call_site = self.call(typ, None, func, args, None);
|
||||
let condition = self.context.new_rvalue_from_int(self.bool_type, 1);
|
||||
self.llbb().end_with_conditional(None, condition, then, catch);
|
||||
if let Some(_fn_abi) = fn_abi {
|
||||
// TODO(bjorn3): Apply function attributes
|
||||
}
|
||||
call_site
|
||||
}
|
||||
|
||||
|
@ -643,11 +655,7 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
|
|||
self.current_func().new_local(None, aligned_type, &format!("stack_var_{}", self.stack_var_count.get())).get_address(None)
|
||||
}
|
||||
|
||||
fn dynamic_alloca(&mut self, _ty: Type<'gcc>, _align: Align) -> RValue<'gcc> {
|
||||
unimplemented!();
|
||||
}
|
||||
|
||||
fn array_alloca(&mut self, _ty: Type<'gcc>, _len: RValue<'gcc>, _align: Align) -> RValue<'gcc> {
|
||||
fn byte_array_alloca(&mut self, _len: RValue<'gcc>, _align: Align) -> RValue<'gcc> {
|
||||
unimplemented!();
|
||||
}
|
||||
|
||||
|
@ -1227,16 +1235,27 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
|
|||
// TODO(antoyo)
|
||||
}
|
||||
|
||||
fn call(&mut self, _typ: Type<'gcc>, func: RValue<'gcc>, args: &[RValue<'gcc>], funclet: Option<&Funclet>) -> RValue<'gcc> {
|
||||
fn call(
|
||||
&mut self,
|
||||
_typ: Type<'gcc>,
|
||||
fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
|
||||
func: RValue<'gcc>,
|
||||
args: &[RValue<'gcc>],
|
||||
funclet: Option<&Funclet>,
|
||||
) -> RValue<'gcc> {
|
||||
// FIXME(antoyo): remove when having a proper API.
|
||||
let gcc_func = unsafe { std::mem::transmute(func) };
|
||||
if self.functions.borrow().values().find(|value| **value == gcc_func).is_some() {
|
||||
let call = if self.functions.borrow().values().find(|value| **value == gcc_func).is_some() {
|
||||
self.function_call(func, args, funclet)
|
||||
}
|
||||
else {
|
||||
// If it's a not function that was defined, it's a function pointer.
|
||||
self.function_ptr_call(func, args, funclet)
|
||||
};
|
||||
if let Some(_fn_abi) = fn_abi {
|
||||
// TODO(bjorn3): Apply function attributes
|
||||
}
|
||||
call
|
||||
}
|
||||
|
||||
fn zext(&mut self, value: RValue<'gcc>, dest_typ: Type<'gcc>) -> RValue<'gcc> {
|
||||
|
|
|
@ -14,6 +14,7 @@ use rustc_target::abi::{self, Align, HasDataLayout, Primitive, Size, WrappingRan
|
|||
|
||||
use crate::base;
|
||||
use crate::context::CodegenCx;
|
||||
use crate::errors::LinkageConstOrMutType;
|
||||
use crate::type_of::LayoutGccExt;
|
||||
|
||||
impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||
|
@ -368,10 +369,7 @@ fn check_and_apply_linkage<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, attrs: &Codeg
|
|||
cx.layout_of(mt.ty).gcc_type(cx, true)
|
||||
}
|
||||
else {
|
||||
cx.sess().span_fatal(
|
||||
span,
|
||||
"must have type `*const T` or `*mut T` due to `#[linkage]` attribute",
|
||||
)
|
||||
cx.sess().emit_fatal(LinkageConstOrMutType { span: span })
|
||||
};
|
||||
// Declare a symbol `foo` with the desired linkage.
|
||||
let global1 = cx.declare_global_with_linkage(&sym, llty2, base::global_linkage_to_gcc(linkage));
|
||||
|
|
|
@ -13,7 +13,7 @@ use rustc_middle::mir::mono::CodegenUnit;
|
|||
use rustc_middle::ty::{self, Instance, ParamEnv, PolyExistentialTraitRef, Ty, TyCtxt};
|
||||
use rustc_middle::ty::layout::{FnAbiError, FnAbiOfHelpers, FnAbiRequest, HasParamEnv, HasTyCtxt, LayoutError, TyAndLayout, LayoutOfHelpers};
|
||||
use rustc_session::Session;
|
||||
use rustc_span::Span;
|
||||
use rustc_span::{Span, source_map::respan};
|
||||
use rustc_target::abi::{call::FnAbi, HasDataLayout, PointeeInfo, Size, TargetDataLayout, VariantIdx};
|
||||
use rustc_target::spec::{HasTargetSpec, Target, TlsModel};
|
||||
|
||||
|
@ -293,7 +293,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
|||
self.is_native_int_type(typ) || self.is_non_native_int_type(typ) || typ.is_compatible_with(self.bool_type)
|
||||
}
|
||||
|
||||
pub fn sess(&self) -> &Session {
|
||||
pub fn sess(&self) -> &'tcx Session {
|
||||
&self.tcx.sess
|
||||
}
|
||||
|
||||
|
@ -416,10 +416,6 @@ impl<'gcc, 'tcx> MiscMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
|||
self.codegen_unit
|
||||
}
|
||||
|
||||
fn used_statics(&self) -> &RefCell<Vec<RValue<'gcc>>> {
|
||||
unimplemented!();
|
||||
}
|
||||
|
||||
fn set_frame_pointer_type(&self, _llfn: RValue<'gcc>) {
|
||||
// TODO(antoyo)
|
||||
}
|
||||
|
@ -428,10 +424,6 @@ impl<'gcc, 'tcx> MiscMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
|||
// TODO(antoyo)
|
||||
}
|
||||
|
||||
fn create_used_variable(&self) {
|
||||
unimplemented!();
|
||||
}
|
||||
|
||||
fn declare_c_main(&self, fn_type: Self::Type) -> Option<Self::Function> {
|
||||
if self.get_declared_value("main").is_none() {
|
||||
Some(self.declare_cfn("main", fn_type))
|
||||
|
@ -443,14 +435,6 @@ impl<'gcc, 'tcx> MiscMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
|||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn compiler_used_statics(&self) -> &RefCell<Vec<RValue<'gcc>>> {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn create_compiler_used_variable(&self) {
|
||||
unimplemented!()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'gcc, 'tcx> HasTyCtxt<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
|
@ -477,7 +461,7 @@ impl<'gcc, 'tcx> LayoutOfHelpers<'tcx> for CodegenCx<'gcc, 'tcx> {
|
|||
#[inline]
|
||||
fn handle_layout_err(&self, err: LayoutError<'tcx>, span: Span, ty: Ty<'tcx>) -> ! {
|
||||
if let LayoutError::SizeOverflow(_) = err {
|
||||
self.sess().span_fatal(span, &err.to_string())
|
||||
self.sess().emit_fatal(respan(span, err))
|
||||
} else {
|
||||
span_bug!(span, "failed to get layout for `{}`: {}", ty, err)
|
||||
}
|
||||
|
@ -495,7 +479,7 @@ impl<'gcc, 'tcx> FnAbiOfHelpers<'tcx> for CodegenCx<'gcc, 'tcx> {
|
|||
fn_abi_request: FnAbiRequest<'tcx>,
|
||||
) -> ! {
|
||||
if let FnAbiError::Layout(LayoutError::SizeOverflow(_)) = err {
|
||||
self.sess().span_fatal(span, &err.to_string())
|
||||
self.sess().emit_fatal(respan(span, err))
|
||||
} else {
|
||||
match fn_abi_request {
|
||||
FnAbiRequest::OfFnPtr { sig, extra_args } => {
|
||||
|
|
compiler/rustc_codegen_gcc/src/errors.rs (new file, 242 lines)
@ -0,0 +1,242 @@
|
|||
use rustc_errors::{DiagnosticArgValue, IntoDiagnosticArg};
|
||||
use rustc_macros::Diagnostic;
|
||||
use rustc_middle::ty::Ty;
|
||||
use rustc_span::{Span, Symbol};
|
||||
use std::borrow::Cow;
|
||||
|
||||
struct ExitCode(Option<i32>);
|
||||
|
||||
impl IntoDiagnosticArg for ExitCode {
|
||||
fn into_diagnostic_arg(self) -> DiagnosticArgValue<'static> {
|
||||
let ExitCode(exit_code) = self;
|
||||
match exit_code {
|
||||
Some(t) => t.into_diagnostic_arg(),
|
||||
None => DiagnosticArgValue::Str(Cow::Borrowed("<signal>")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc::ranlib_failure)]
|
||||
pub(crate) struct RanlibFailure {
|
||||
exit_code: ExitCode,
|
||||
}
|
||||
|
||||
impl RanlibFailure {
|
||||
pub fn new(exit_code: Option<i32>) -> Self {
|
||||
RanlibFailure { exit_code: ExitCode(exit_code) }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc::invalid_monomorphization_basic_integer, code = "E0511")]
|
||||
pub(crate) struct InvalidMonomorphizationBasicInteger<'a> {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
pub name: Symbol,
|
||||
pub ty: Ty<'a>,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc::invalid_monomorphization_invalid_float_vector, code = "E0511")]
|
||||
pub(crate) struct InvalidMonomorphizationInvalidFloatVector<'a> {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
pub name: Symbol,
|
||||
pub elem_ty: &'a str,
|
||||
pub vec_ty: Ty<'a>,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc::invalid_monomorphization_not_float, code = "E0511")]
|
||||
pub(crate) struct InvalidMonomorphizationNotFloat<'a> {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
pub name: Symbol,
|
||||
pub ty: Ty<'a>,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc::invalid_monomorphization_unrecognized, code = "E0511")]
|
||||
pub(crate) struct InvalidMonomorphizationUnrecognized {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
pub name: Symbol,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc::invalid_monomorphization_expected_signed_unsigned, code = "E0511")]
|
||||
pub(crate) struct InvalidMonomorphizationExpectedSignedUnsigned<'a> {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
pub name: Symbol,
|
||||
pub elem_ty: Ty<'a>,
|
||||
pub vec_ty: Ty<'a>,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc::invalid_monomorphization_unsupported_element, code = "E0511")]
|
||||
pub(crate) struct InvalidMonomorphizationUnsupportedElement<'a> {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
pub name: Symbol,
|
||||
pub in_ty: Ty<'a>,
|
||||
pub elem_ty: Ty<'a>,
|
||||
pub ret_ty: Ty<'a>,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc::invalid_monomorphization_invalid_bitmask, code = "E0511")]
|
||||
pub(crate) struct InvalidMonomorphizationInvalidBitmask<'a> {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
pub name: Symbol,
|
||||
pub ty: Ty<'a>,
|
||||
pub expected_int_bits: u64,
|
||||
pub expected_bytes: u64,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc::invalid_monomorphization_simd_shuffle, code = "E0511")]
|
||||
pub(crate) struct InvalidMonomorphizationSimdShuffle<'a> {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
pub name: Symbol,
|
||||
pub ty: Ty<'a>,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc::invalid_monomorphization_expected_simd, code = "E0511")]
|
||||
pub(crate) struct InvalidMonomorphizationExpectedSimd<'a> {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
pub name: Symbol,
|
||||
pub position: &'a str,
|
||||
pub found_ty: Ty<'a>,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc::invalid_monomorphization_mask_type, code = "E0511")]
|
||||
pub(crate) struct InvalidMonomorphizationMaskType<'a> {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
pub name: Symbol,
|
||||
pub ty: Ty<'a>,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc::invalid_monomorphization_return_length, code = "E0511")]
|
||||
pub(crate) struct InvalidMonomorphizationReturnLength<'a> {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
pub name: Symbol,
|
||||
pub in_len: u64,
|
||||
pub ret_ty: Ty<'a>,
|
||||
pub out_len: u64,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc::invalid_monomorphization_return_length_input_type, code = "E0511")]
|
||||
pub(crate) struct InvalidMonomorphizationReturnLengthInputType<'a> {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
pub name: Symbol,
|
||||
pub in_len: u64,
|
||||
pub in_ty: Ty<'a>,
|
||||
pub ret_ty: Ty<'a>,
|
||||
pub out_len: u64,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc::invalid_monomorphization_return_element, code = "E0511")]
|
||||
pub(crate) struct InvalidMonomorphizationReturnElement<'a> {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
pub name: Symbol,
|
||||
pub in_elem: Ty<'a>,
|
||||
pub in_ty: Ty<'a>,
|
||||
pub ret_ty: Ty<'a>,
|
||||
pub out_ty: Ty<'a>,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc::invalid_monomorphization_return_type, code = "E0511")]
|
||||
pub(crate) struct InvalidMonomorphizationReturnType<'a> {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
pub name: Symbol,
|
||||
pub in_elem: Ty<'a>,
|
||||
pub in_ty: Ty<'a>,
|
||||
pub ret_ty: Ty<'a>,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc::invalid_monomorphization_inserted_type, code = "E0511")]
|
||||
pub(crate) struct InvalidMonomorphizationInsertedType<'a> {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
pub name: Symbol,
|
||||
pub in_elem: Ty<'a>,
|
||||
pub in_ty: Ty<'a>,
|
||||
pub out_ty: Ty<'a>,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc::invalid_monomorphization_return_integer_type, code = "E0511")]
|
||||
pub(crate) struct InvalidMonomorphizationReturnIntegerType<'a> {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
pub name: Symbol,
|
||||
pub ret_ty: Ty<'a>,
|
||||
pub out_ty: Ty<'a>,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc::invalid_monomorphization_mismatched_lengths, code = "E0511")]
|
||||
pub(crate) struct InvalidMonomorphizationMismatchedLengths {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
pub name: Symbol,
|
||||
pub m_len: u64,
|
||||
pub v_len: u64,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc::invalid_monomorphization_unsupported_cast, code = "E0511")]
|
||||
pub(crate) struct InvalidMonomorphizationUnsupportedCast<'a> {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
pub name: Symbol,
|
||||
pub in_ty: Ty<'a>,
|
||||
pub in_elem: Ty<'a>,
|
||||
pub ret_ty: Ty<'a>,
|
||||
pub out_elem: Ty<'a>,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc::invalid_monomorphization_unsupported_operation, code = "E0511")]
|
||||
pub(crate) struct InvalidMonomorphizationUnsupportedOperation<'a> {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
pub name: Symbol,
|
||||
pub in_ty: Ty<'a>,
|
||||
pub in_elem: Ty<'a>,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc::linkage_const_or_mut_type)]
|
||||
pub(crate) struct LinkageConstOrMutType {
|
||||
#[primary_span]
|
||||
pub span: Span
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc::lto_not_supported)]
|
||||
pub(crate) struct LTONotSupported;
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc::unwinding_inline_asm)]
|
||||
pub(crate) struct UnwindingInlineAsm {
|
||||
#[primary_span]
|
||||
pub span: Span
|
||||
}
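These structs replace hand-formatted `fatal`/`span_err` strings at the call sites changed elsewhere in this commit; a minimal usage sketch, with both calls taken verbatim from other hunks in this diff (the archive.rs hunk earlier and the intrinsic.rs hunk below):

// Usage sketch: structured, translatable diagnostics replace format!()-built strings.
self.config.sess.emit_fatal(RanlibFailure::new(status.code()));
tcx.sess.emit_err(InvalidMonomorphizationBasicInteger { span, name, ty });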
|
|
@ -4,7 +4,7 @@ mod simd;
|
|||
use gccjit::{ComparisonOp, Function, RValue, ToRValue, Type, UnaryOp, FunctionType};
|
||||
use rustc_codegen_ssa::MemFlags;
|
||||
use rustc_codegen_ssa::base::wants_msvc_seh;
|
||||
use rustc_codegen_ssa::common::{IntPredicate, span_invalid_monomorphization_error};
|
||||
use rustc_codegen_ssa::common::IntPredicate;
|
||||
use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
|
||||
use rustc_codegen_ssa::mir::place::PlaceRef;
|
||||
use rustc_codegen_ssa::traits::{ArgAbiMethods, BaseTypeMethods, BuilderMethods, ConstMethods, IntrinsicCallMethods};
|
||||
|
@ -20,6 +20,7 @@ use crate::abi::GccType;
|
|||
use crate::builder::Builder;
|
||||
use crate::common::{SignType, TypeReflection};
|
||||
use crate::context::CodegenCx;
|
||||
use crate::errors::InvalidMonomorphizationBasicInteger;
|
||||
use crate::type_of::LayoutGccExt;
|
||||
use crate::intrinsic::simd::generic_simd_intrinsic;
|
||||
|
||||
|
@ -99,7 +100,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
|||
_ if simple.is_some() => {
|
||||
// FIXME(antoyo): remove this cast when the API supports function.
|
||||
let func = unsafe { std::mem::transmute(simple.expect("simple")) };
|
||||
self.call(self.type_void(), func, &args.iter().map(|arg| arg.immediate()).collect::<Vec<_>>(), None)
|
||||
self.call(self.type_void(), None, func, &args.iter().map(|arg| arg.immediate()).collect::<Vec<_>>(), None)
|
||||
},
|
||||
sym::likely => {
|
||||
self.expect(args[0].immediate(), true)
|
||||
|
@ -242,15 +243,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
|||
_ => bug!(),
|
||||
},
|
||||
None => {
|
||||
span_invalid_monomorphization_error(
|
||||
tcx.sess,
|
||||
span,
|
||||
&format!(
|
||||
"invalid monomorphization of `{}` intrinsic: \
|
||||
expected basic integer type, found `{}`",
|
||||
name, ty
|
||||
),
|
||||
);
|
||||
tcx.sess.emit_err(InvalidMonomorphizationBasicInteger { span, name, ty });
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
@ -348,7 +341,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
|||
fn abort(&mut self) {
|
||||
let func = self.context.get_builtin_function("abort");
|
||||
let func: RValue<'gcc> = unsafe { std::mem::transmute(func) };
|
||||
self.call(self.type_void(), func, &[], None);
|
||||
self.call(self.type_void(), None, func, &[], None);
|
||||
}
|
||||
|
||||
fn assume(&mut self, value: Self::Value) {
|
||||
|
@ -1131,7 +1124,7 @@ fn try_intrinsic<'gcc, 'tcx>(bx: &mut Builder<'_, 'gcc, 'tcx>, try_func: RValue<
|
|||
// NOTE: the `|| true` here is to use the panic=abort strategy with panic=unwind too
|
||||
if bx.sess().panic_strategy() == PanicStrategy::Abort || true {
|
||||
// TODO(bjorn3): Properly implement unwinding and remove the `|| true` once this is done.
|
||||
bx.call(bx.type_void(), try_func, &[data], None);
|
||||
bx.call(bx.type_void(), None, try_func, &[data], None);
|
||||
// Return 0 unconditionally from the intrinsic call;
|
||||
// we can never unwind.
|
||||
let ret_align = bx.tcx.data_layout.i32_align.abi;
|
||||
|
|
|
@ -2,7 +2,7 @@ use std::cmp::Ordering;
|
|||
|
||||
use gccjit::{BinaryOp, RValue, Type, ToRValue};
|
||||
use rustc_codegen_ssa::base::compare_simd_types;
|
||||
use rustc_codegen_ssa::common::{TypeKind, span_invalid_monomorphization_error};
|
||||
use rustc_codegen_ssa::common::TypeKind;
|
||||
use rustc_codegen_ssa::mir::operand::OperandRef;
|
||||
use rustc_codegen_ssa::mir::place::PlaceRef;
|
||||
use rustc_codegen_ssa::traits::{BaseTypeMethods, BuilderMethods};
|
||||
|
@ -14,43 +14,48 @@ use rustc_span::{Span, Symbol, sym};
|
|||
use rustc_target::abi::Align;
|
||||
|
||||
use crate::builder::Builder;
|
||||
use crate::errors::{
|
||||
InvalidMonomorphizationInvalidFloatVector,
|
||||
InvalidMonomorphizationNotFloat,
|
||||
InvalidMonomorphizationUnrecognized,
|
||||
InvalidMonomorphizationExpectedSignedUnsigned,
|
||||
InvalidMonomorphizationUnsupportedElement,
|
||||
InvalidMonomorphizationInvalidBitmask,
|
||||
InvalidMonomorphizationSimdShuffle,
|
||||
InvalidMonomorphizationExpectedSimd,
|
||||
InvalidMonomorphizationMaskType,
|
||||
InvalidMonomorphizationReturnLength,
|
||||
InvalidMonomorphizationReturnLengthInputType,
|
||||
InvalidMonomorphizationReturnElement,
|
||||
InvalidMonomorphizationReturnType,
|
||||
InvalidMonomorphizationInsertedType,
|
||||
InvalidMonomorphizationReturnIntegerType,
|
||||
InvalidMonomorphizationMismatchedLengths,
|
||||
InvalidMonomorphizationUnsupportedCast,
|
||||
InvalidMonomorphizationUnsupportedOperation
|
||||
};
|
||||
use crate::intrinsic;
|
||||
|
||||
pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>, name: Symbol, callee_ty: Ty<'tcx>, args: &[OperandRef<'tcx, RValue<'gcc>>], ret_ty: Ty<'tcx>, llret_ty: Type<'gcc>, span: Span) -> Result<RValue<'gcc>, ()> {
|
||||
// macros for error handling:
|
||||
#[allow(unused_macro_rules)]
|
||||
macro_rules! emit_error {
|
||||
($msg: tt) => {
|
||||
emit_error!($msg, )
|
||||
};
|
||||
($msg: tt, $($fmt: tt)*) => {
|
||||
span_invalid_monomorphization_error(
|
||||
bx.sess(), span,
|
||||
&format!(concat!("invalid monomorphization of `{}` intrinsic: ", $msg),
|
||||
name, $($fmt)*));
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! return_error {
|
||||
($($fmt: tt)*) => {
|
||||
($err:expr) => {
|
||||
{
|
||||
emit_error!($($fmt)*);
|
||||
bx.sess().emit_err($err);
|
||||
return Err(());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! require {
|
||||
($cond: expr, $($fmt: tt)*) => {
|
||||
($cond:expr, $err:expr) => {
|
||||
if !$cond {
|
||||
return_error!($($fmt)*);
|
||||
return_error!($err);
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! require_simd {
|
||||
($ty: expr, $position: expr) => {
|
||||
require!($ty.is_simd(), "expected SIMD {} type, found non-SIMD `{}`", $position, $ty)
|
||||
require!($ty.is_simd(), InvalidMonomorphizationExpectedSimd { span, name, position: $position, found_ty: $ty })
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -82,10 +87,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>,
|
|||
bx.load(int_ty, ptr, Align::ONE)
|
||||
}
|
||||
_ => return_error!(
|
||||
"invalid bitmask `{}`, expected `u{}` or `[u8; {}]`",
|
||||
mask_ty,
|
||||
expected_int_bits,
|
||||
expected_bytes
|
||||
InvalidMonomorphizationInvalidBitmask { span, name, ty: mask_ty, expected_int_bits, expected_bytes }
|
||||
),
|
||||
};
|
||||
|
||||
|
@ -127,18 +129,11 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>,
|
|||
let (out_len, out_ty) = ret_ty.simd_size_and_type(bx.tcx());
|
||||
require!(
|
||||
in_len == out_len,
|
||||
"expected return type with length {} (same as input type `{}`), \
|
||||
found `{}` with length {}",
|
||||
in_len,
|
||||
in_ty,
|
||||
ret_ty,
|
||||
out_len
|
||||
InvalidMonomorphizationReturnLengthInputType { span, name, in_len, in_ty, ret_ty, out_len }
|
||||
);
|
||||
require!(
|
||||
bx.type_kind(bx.element_type(llret_ty)) == TypeKind::Integer,
|
||||
"expected return type with integer elements, found `{}` with non-integer `{}`",
|
||||
ret_ty,
|
||||
out_ty
|
||||
InvalidMonomorphizationReturnIntegerType {span, name, ret_ty, out_ty}
|
||||
);
|
||||
|
||||
return Ok(compare_simd_types(
|
||||
|
@ -163,8 +158,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>,
|
|||
})
|
||||
}
|
||||
_ => return_error!(
|
||||
"simd_shuffle index must be an array of `u32`, got `{}`",
|
||||
args[2].layout.ty
|
||||
InvalidMonomorphizationSimdShuffle { span, name, ty: args[2].layout.ty }
|
||||
),
|
||||
}
|
||||
}
|
||||
|
@ -179,19 +173,11 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>,
|
|||
let (out_len, out_ty) = ret_ty.simd_size_and_type(bx.tcx());
|
||||
require!(
|
||||
out_len == n,
|
||||
"expected return type of length {}, found `{}` with length {}",
|
||||
n,
|
||||
ret_ty,
|
||||
out_len
|
||||
InvalidMonomorphizationReturnLength { span, name, in_len: n, ret_ty, out_len }
|
||||
);
|
||||
require!(
|
||||
in_elem == out_ty,
|
||||
"expected return element type `{}` (element of input `{}`), \
|
||||
found `{}` with element type `{}`",
|
||||
in_elem,
|
||||
in_ty,
|
||||
ret_ty,
|
||||
out_ty
|
||||
InvalidMonomorphizationReturnElement { span, name, in_elem, in_ty, ret_ty, out_ty }
|
||||
);
|
||||
|
||||
let vector = args[2].immediate();
|
||||
|
@ -207,10 +193,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>,
|
|||
if name == sym::simd_insert {
|
||||
require!(
|
||||
in_elem == arg_tys[2],
|
||||
"expected inserted type `{}` (element of input `{}`), found `{}`",
|
||||
in_elem,
|
||||
in_ty,
|
||||
arg_tys[2]
|
||||
InvalidMonomorphizationInsertedType { span, name, in_elem, in_ty, out_ty: arg_tys[2] }
|
||||
);
|
||||
let vector = args[0].immediate();
|
||||
let index = args[1].immediate();
|
||||
|
@ -263,10 +246,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>,
|
|||
if name == sym::simd_extract {
|
||||
require!(
|
||||
ret_ty == in_elem,
|
||||
"expected return type `{}` (element of input `{}`), found `{}`",
|
||||
in_elem,
|
||||
in_ty,
|
||||
ret_ty
|
||||
InvalidMonomorphizationReturnType { span, name, in_elem, in_ty, ret_ty }
|
||||
);
|
||||
let vector = args[0].immediate();
|
||||
return Ok(bx.context.new_vector_access(None, vector, args[1].immediate()).to_rvalue());
|
||||
|
@ -279,13 +259,11 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>,
|
|||
let (v_len, _) = arg_tys[1].simd_size_and_type(bx.tcx());
|
||||
require!(
|
||||
m_len == v_len,
|
||||
"mismatched lengths: mask length `{}` != other vector length `{}`",
|
||||
m_len,
|
||||
v_len
|
||||
InvalidMonomorphizationMismatchedLengths { span, name, m_len, v_len }
|
||||
);
|
||||
match m_elem_ty.kind() {
|
||||
ty::Int(_) => {}
|
||||
_ => return_error!("mask element type is `{}`, expected `i_`", m_elem_ty),
|
||||
_ => return_error!(InvalidMonomorphizationMaskType { span, name, ty: m_elem_ty }),
|
||||
}
|
||||
return Ok(bx.vector_select(args[0].immediate(), args[1].immediate(), args[2].immediate()));
|
||||
}
|
||||
|
@ -295,12 +273,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>,
|
|||
let (out_len, out_elem) = ret_ty.simd_size_and_type(bx.tcx());
|
||||
require!(
|
||||
in_len == out_len,
|
||||
"expected return type with length {} (same as input type `{}`), \
|
||||
found `{}` with length {}",
|
||||
in_len,
|
||||
in_ty,
|
||||
ret_ty,
|
||||
out_len
|
||||
InvalidMonomorphizationReturnLengthInputType { span, name, in_len, in_ty, ret_ty, out_len }
|
||||
);
|
||||
// casting cares about nominal type, not just structural type
|
||||
if in_elem == out_elem {
|
||||
|
@ -412,13 +385,8 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>,
|
|||
}
|
||||
_ => { /* Unsupported. Fallthrough. */ }
|
||||
}
|
||||
require!(
|
||||
false,
|
||||
"unsupported cast from `{}` with element `{}` to `{}` with element `{}`",
|
||||
in_ty,
|
||||
in_elem,
|
||||
ret_ty,
|
||||
out_elem
|
||||
return_error!(
|
||||
InvalidMonomorphizationUnsupportedCast { span, name, in_ty, in_elem, ret_ty, out_elem }
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -431,10 +399,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>,
|
|||
})*
|
||||
_ => {},
|
||||
}
|
||||
require!(false,
|
||||
"unsupported operation on `{}` with element `{}`",
|
||||
in_ty,
|
||||
in_elem)
|
||||
return_error!(InvalidMonomorphizationUnsupportedOperation { span, name, in_ty, in_elem })
|
||||
})*
|
||||
}
|
||||
}
|
||||
|
@ -448,23 +413,14 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>,
|
|||
span: Span,
|
||||
args: &[OperandRef<'tcx, RValue<'gcc>>],
|
||||
) -> Result<RValue<'gcc>, ()> {
|
||||
macro_rules! emit_error {
|
||||
($msg: tt, $($fmt: tt)*) => {
|
||||
span_invalid_monomorphization_error(
|
||||
bx.sess(), span,
|
||||
&format!(concat!("invalid monomorphization of `{}` intrinsic: ", $msg),
|
||||
name, $($fmt)*));
|
||||
}
|
||||
}
|
||||
macro_rules! return_error {
|
||||
($($fmt: tt)*) => {
|
||||
($err:expr) => {
|
||||
{
|
||||
emit_error!($($fmt)*);
|
||||
bx.sess().emit_err($err);
|
||||
return Err(());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let (elem_ty_str, elem_ty) =
|
||||
if let ty::Float(f) = in_elem.kind() {
|
||||
let elem_ty = bx.cx.type_float_from_ty(*f);
|
||||
|
@ -472,16 +428,12 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>,
|
|||
32 => ("f32", elem_ty),
|
||||
64 => ("f64", elem_ty),
|
||||
_ => {
|
||||
return_error!(
|
||||
"unsupported element type `{}` of floating-point vector `{}`",
|
||||
f.name_str(),
|
||||
in_ty
|
||||
);
|
||||
return_error!(InvalidMonomorphizationInvalidFloatVector { span, name, elem_ty: f.name_str(), vec_ty: in_ty });
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
return_error!("`{}` is not a floating-point type", in_ty);
|
||||
return_error!(InvalidMonomorphizationNotFloat { span, name, ty: in_ty });
|
||||
};
|
||||
|
||||
let vec_ty = bx.cx.type_vector(elem_ty, in_len);
|
||||
|
@ -504,12 +456,12 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>,
|
|||
sym::simd_fsqrt => ("sqrt", bx.type_func(&[vec_ty], vec_ty)),
|
||||
sym::simd_round => ("round", bx.type_func(&[vec_ty], vec_ty)),
|
||||
sym::simd_trunc => ("trunc", bx.type_func(&[vec_ty], vec_ty)),
|
||||
_ => return_error!("unrecognized intrinsic `{}`", name),
|
||||
_ => return_error!(InvalidMonomorphizationUnrecognized { span, name })
|
||||
};
|
||||
let llvm_name = &format!("llvm.{0}.v{1}{2}", intr_name, in_len, elem_ty_str);
|
||||
let function = intrinsic::llvm::intrinsic(llvm_name, &bx.cx);
|
||||
let function: RValue<'gcc> = unsafe { std::mem::transmute(function) };
|
||||
let c = bx.call(fn_ty, function, &args.iter().map(|arg| arg.immediate()).collect::<Vec<_>>(), None);
|
||||
let c = bx.call(fn_ty, None, function, &args.iter().map(|arg| arg.immediate()).collect::<Vec<_>>(), None);
|
||||
Ok(c)
|
||||
}
|
||||
|
||||
|
@ -557,10 +509,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>,
|
|||
})*
|
||||
_ => {},
|
||||
}
|
||||
require!(false,
|
||||
"unsupported operation on `{}` with element `{}`",
|
||||
in_ty,
|
||||
in_elem)
|
||||
return_error!(InvalidMonomorphizationUnsupportedOperation { span, name, in_ty, in_elem })
|
||||
})*
|
||||
}
|
||||
}
|
||||
|
@ -579,12 +528,12 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>,
|
|||
ty::Int(i) => (true, i.bit_width().unwrap_or(ptr_bits), bx.cx.type_int_from_ty(i)),
|
||||
ty::Uint(i) => (false, i.bit_width().unwrap_or(ptr_bits), bx.cx.type_uint_from_ty(i)),
|
||||
_ => {
|
||||
return_error!(
|
||||
"expected element type `{}` of vector type `{}` \
|
||||
to be a signed or unsigned integer type",
|
||||
arg_tys[0].simd_size_and_type(bx.tcx()).1,
|
||||
arg_tys[0]
|
||||
);
|
||||
return_error!(InvalidMonomorphizationExpectedSignedUnsigned {
|
||||
span,
|
||||
name,
|
||||
elem_ty: arg_tys[0].simd_size_and_type(bx.tcx()).1,
|
||||
vec_ty: arg_tys[0],
|
||||
});
|
||||
}
|
||||
};
|
||||
let builtin_name =
|
||||
|
@ -617,10 +566,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>,
|
|||
if name == sym::$name {
|
||||
require!(
|
||||
ret_ty == in_elem,
|
||||
"expected return type `{}` (element of input `{}`), found `{}`",
|
||||
in_elem,
|
||||
in_ty,
|
||||
ret_ty
|
||||
InvalidMonomorphizationReturnType { span, name, in_elem, in_ty, ret_ty }
|
||||
);
|
||||
return match in_elem.kind() {
|
||||
ty::Int(_) | ty::Uint(_) => {
|
||||
|
@ -644,13 +590,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>,
|
|||
Ok(bx.vector_reduce_op(args[0].immediate(), $vec_op))
|
||||
}
|
||||
}
|
||||
_ => return_error!(
|
||||
"unsupported {} from `{}` with element `{}` to `{}`",
|
||||
sym::$name,
|
||||
in_ty,
|
||||
in_elem,
|
||||
ret_ty
|
||||
),
|
||||
_ => return_error!(InvalidMonomorphizationUnsupportedElement { span, name, in_ty, elem_ty: in_elem, ret_ty }),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
@ -676,20 +616,11 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>,
|
|||
if name == sym::$name {
|
||||
require!(
|
||||
ret_ty == in_elem,
|
||||
"expected return type `{}` (element of input `{}`), found `{}`",
|
||||
in_elem,
|
||||
in_ty,
|
||||
ret_ty
|
||||
InvalidMonomorphizationReturnType { span, name, in_elem, in_ty, ret_ty }
|
||||
);
|
||||
return match in_elem.kind() {
|
||||
ty::Int(_) | ty::Uint(_) | ty::Float(_) => Ok(bx.$reduction(args[0].immediate())),
|
||||
_ => return_error!(
|
||||
"unsupported {} from `{}` with element `{}` to `{}`",
|
||||
sym::$name,
|
||||
in_ty,
|
||||
in_elem,
|
||||
ret_ty
|
||||
),
|
||||
_ => return_error!(InvalidMonomorphizationUnsupportedElement { span, name, in_ty, elem_ty: in_elem, ret_ty }),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
@ -704,22 +635,13 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>,
|
|||
let input = if !$boolean {
|
||||
require!(
|
||||
ret_ty == in_elem,
|
||||
"expected return type `{}` (element of input `{}`), found `{}`",
|
||||
in_elem,
|
||||
in_ty,
|
||||
ret_ty
|
||||
InvalidMonomorphizationReturnType { span, name, in_elem, in_ty, ret_ty }
|
||||
);
|
||||
args[0].immediate()
|
||||
} else {
|
||||
match in_elem.kind() {
|
||||
ty::Int(_) | ty::Uint(_) => {}
|
||||
_ => return_error!(
|
||||
"unsupported {} from `{}` with element `{}` to `{}`",
|
||||
sym::$name,
|
||||
in_ty,
|
||||
in_elem,
|
||||
ret_ty
|
||||
),
|
||||
_ => return_error!(InvalidMonomorphizationUnsupportedElement { span, name, in_ty, elem_ty: in_elem, ret_ty }),
|
||||
}
|
||||
|
||||
// boolean reductions operate on vectors of i1s:
|
||||
|
@ -733,11 +655,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>,
|
|||
Ok(if !$boolean { r } else { bx.zext(r, bx.type_bool()) })
|
||||
}
|
||||
_ => return_error!(
|
||||
"unsupported {} from `{}` with element `{}` to `{}`",
|
||||
sym::$name,
|
||||
in_ty,
|
||||
in_elem,
|
||||
ret_ty
|
||||
InvalidMonomorphizationUnsupportedElement { span, name, in_ty, elem_ty: in_elem, ret_ty }
|
||||
),
|
||||
};
|
||||
}
|
||||
|
|
|
@ -18,6 +18,8 @@
|
|||
#![recursion_limit="256"]
|
||||
#![warn(rust_2018_idioms)]
|
||||
#![warn(unused_lifetimes)]
|
||||
#![deny(rustc::untranslatable_diagnostic)]
|
||||
#![deny(rustc::diagnostic_outside_of_impl)]
|
||||
|
||||
extern crate rustc_apfloat;
|
||||
extern crate rustc_ast;
|
||||
|
@ -25,6 +27,7 @@ extern crate rustc_codegen_ssa;
|
|||
extern crate rustc_data_structures;
|
||||
extern crate rustc_errors;
|
||||
extern crate rustc_hir;
|
||||
extern crate rustc_macros;
|
||||
extern crate rustc_metadata;
|
||||
extern crate rustc_middle;
|
||||
extern crate rustc_session;
|
||||
|
@ -50,6 +53,7 @@ mod context;
|
|||
mod coverageinfo;
|
||||
mod debuginfo;
|
||||
mod declare;
|
||||
mod errors;
|
||||
mod int;
|
||||
mod intrinsic;
|
||||
mod mono_item;
|
||||
|
@ -59,6 +63,7 @@ mod type_of;
|
|||
use std::any::Any;
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use crate::errors::LTONotSupported;
|
||||
use gccjit::{Context, OptimizationLevel, CType};
|
||||
use rustc_ast::expand::allocator::AllocatorKind;
|
||||
use rustc_codegen_ssa::{CodegenResults, CompiledModule, ModuleCodegen};
|
||||
|
@ -97,7 +102,7 @@ pub struct GccCodegenBackend {
|
|||
impl CodegenBackend for GccCodegenBackend {
|
||||
fn init(&self, sess: &Session) {
|
||||
if sess.lto() != Lto::No {
|
||||
sess.warn("LTO is not supported. You may get a linker error.");
|
||||
sess.emit_warning(LTONotSupported {});
|
||||
}
|
||||
|
||||
let temp_dir = TempDir::new().expect("cannot create temporary directory");
|
||||
|
@ -166,15 +171,6 @@ impl ExtraBackendMethods for GccCodegenBackend {
|
|||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
fn target_cpu<'b>(&self, _sess: &'b Session) -> &'b str {
|
||||
unimplemented!();
|
||||
}
|
||||
|
||||
fn tune_cpu<'b>(&self, _sess: &'b Session) -> Option<&'b str> {
|
||||
None
|
||||
// TODO(antoyo)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ModuleBuffer;
|
||||
|
@ -205,7 +201,6 @@ impl WriteBackendMethods for GccCodegenBackend {
|
|||
type Module = GccContext;
|
||||
type TargetMachine = ();
|
||||
type ModuleBuffer = ModuleBuffer;
|
||||
type Context = ();
|
||||
type ThinData = ();
|
||||
type ThinBuffer = ThinBuffer;
|
||||
|
||||
|
|
|
@ -3,11 +3,12 @@
|
|||
// Run-time:
|
||||
// status: 0
|
||||
|
||||
#![feature(asm_const, asm_sym)]
|
||||
#![feature(asm_const)]
|
||||
|
||||
use std::arch::{asm, global_asm};
|
||||
|
||||
global_asm!("
|
||||
global_asm!(
|
||||
"
|
||||
.global add_asm
|
||||
add_asm:
|
||||
mov rax, rdi
|
||||
|
@ -132,7 +133,9 @@ fn main() {
|
|||
assert_eq!(x, 43);
|
||||
|
||||
// check sym fn
|
||||
extern "C" fn foo() -> u64 { 42 }
|
||||
extern "C" fn foo() -> u64 {
|
||||
42
|
||||
}
|
||||
let x: u64;
|
||||
unsafe {
|
||||
asm!("call {}", sym foo, lateout("rax") x);
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
// Run-time:
|
||||
// status: 0
|
||||
|
||||
#![feature(bench_black_box, const_black_box, core_intrinsics, start)]
|
||||
#![feature(const_black_box, core_intrinsics, start)]
|
||||
|
||||
#![no_std]
|
||||
|
||||
|
|
|
@ -5,13 +5,11 @@ edition = "2021"
|
|||
|
||||
[lib]
|
||||
test = false
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
bitflags = "1.0"
|
||||
cstr = "0.2"
|
||||
libc = "0.2"
|
||||
libloading = "0.7.1"
|
||||
measureme = "10.0.0"
|
||||
object = { version = "0.29.0", default-features = false, features = ["std", "read_core", "archive", "coff", "elf", "macho", "pe"] }
|
||||
tracing = "0.1"
|
||||
|
@ -35,3 +33,4 @@ rustc_target = { path = "../rustc_target" }
|
|||
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
|
||||
rustc_ast = { path = "../rustc_ast" }
|
||||
rustc_span = { path = "../rustc_span" }
|
||||
tempfile = "3.2.0"
|
||||
|
|
|
@ -592,10 +592,6 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
|
|||
}
|
||||
|
||||
impl<'tcx> AbiBuilderMethods<'tcx> for Builder<'_, '_, 'tcx> {
|
||||
fn apply_attrs_callsite(&mut self, fn_abi: &FnAbi<'tcx, Ty<'tcx>>, callsite: Self::Value) {
|
||||
fn_abi.apply_attrs_callsite(self, callsite)
|
||||
}
|
||||
|
||||
fn get_param(&mut self, index: usize) -> Self::Value {
|
||||
llvm::get_param(self.llfn(), index as c_uint)
|
||||
}
|
||||
|
|
|
@ -430,9 +430,9 @@ pub(crate) fn inline_asm_call<'ll>(
|
|||
);
|
||||
|
||||
let call = if let Some((dest, catch, funclet)) = dest_catch_funclet {
|
||||
bx.invoke(fty, v, inputs, dest, catch, funclet)
|
||||
bx.invoke(fty, None, v, inputs, dest, catch, funclet)
|
||||
} else {
|
||||
bx.call(fty, v, inputs, None)
|
||||
bx.call(fty, None, v, inputs, None)
|
||||
};
|
||||
|
||||
// Store mark in a metadata node so we can map LLVM errors
|
||||
|
@ -551,6 +551,8 @@ fn reg_to_llvm(reg: InlineAsmRegOrRegClass, layout: Option<&TyAndLayout<'_>>) ->
|
|||
format!("{{{}}}", reg.name())
|
||||
}
|
||||
}
|
||||
// The constraints can be retrieved from
|
||||
// https://llvm.org/docs/LangRef.html#supported-constraint-code-list
|
||||
InlineAsmRegOrRegClass::RegClass(reg) => match reg {
|
||||
InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::reg) => "r",
|
||||
InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::vreg) => "w",
|
||||
|
@ -624,6 +626,8 @@ fn modifier_to_llvm(
|
|||
reg: InlineAsmRegClass,
|
||||
modifier: Option<char>,
|
||||
) -> Option<char> {
|
||||
// The modifiers can be retrieved from
|
||||
// https://llvm.org/docs/LangRef.html#asm-template-argument-modifiers
|
||||
match reg {
|
||||
InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::reg) => modifier,
|
||||
InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::vreg)
|
||||
|
|
|
@@ -2,16 +2,20 @@

use std::env;
use std::ffi::{CStr, CString, OsString};
use std::io;
use std::fs;
use std::io::{self, Write};
use std::mem;
use std::path::{Path, PathBuf};
use std::ptr;
use std::str;

use object::read::macho::FatArch;

use crate::common;
use crate::llvm::archive_ro::{ArchiveRO, Child};
use crate::llvm::{self, ArchiveKind, LLVMMachineType, LLVMRustCOFFShortExport};
use rustc_codegen_ssa::back::archive::{ArchiveBuilder, ArchiveBuilderBuilder};
use rustc_data_structures::memmap::Mmap;
use rustc_session::cstore::DllImport;
use rustc_session::Session;

@@ -53,13 +57,70 @@ fn llvm_machine_type(cpu: &str) -> LLVMMachineType {
}
}

fn try_filter_fat_archs(
archs: object::read::Result<&[impl FatArch]>,
target_arch: object::Architecture,
archive_path: &Path,
archive_map_data: &[u8],
) -> io::Result<Option<PathBuf>> {
let archs = archs.map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;

let desired = match archs.iter().filter(|a| a.architecture() == target_arch).next() {
Some(a) => a,
None => return Ok(None),
};

let (mut new_f, extracted_path) = tempfile::Builder::new()
.suffix(archive_path.file_name().unwrap())
.tempfile()?
.keep()
.unwrap();

new_f.write_all(
desired.data(archive_map_data).map_err(|e| io::Error::new(io::ErrorKind::Other, e))?,
)?;

Ok(Some(extracted_path))
}

fn try_extract_macho_fat_archive(
sess: &Session,
archive_path: &Path,
) -> io::Result<Option<PathBuf>> {
let archive_map = unsafe { Mmap::map(fs::File::open(&archive_path)?)? };
let target_arch = match sess.target.arch.as_ref() {
"aarch64" => object::Architecture::Aarch64,
"x86_64" => object::Architecture::X86_64,
_ => return Ok(None),
};

match object::macho::FatHeader::parse(&*archive_map) {
Ok(h) if h.magic.get(object::endian::BigEndian) == object::macho::FAT_MAGIC => {
let archs = object::macho::FatHeader::parse_arch32(&*archive_map);
try_filter_fat_archs(archs, target_arch, archive_path, &*archive_map)
}
Ok(h) if h.magic.get(object::endian::BigEndian) == object::macho::FAT_MAGIC_64 => {
let archs = object::macho::FatHeader::parse_arch64(&*archive_map);
try_filter_fat_archs(archs, target_arch, archive_path, &*archive_map)
}
// Not a FatHeader at all, just return None.
_ => Ok(None),
}
}

impl<'a> ArchiveBuilder<'a> for LlvmArchiveBuilder<'a> {
fn add_archive(
&mut self,
archive: &Path,
skip: Box<dyn FnMut(&str) -> bool + 'static>,
) -> io::Result<()> {
let archive_ro = match ArchiveRO::open(archive) {
let mut archive = archive.to_path_buf();
if self.sess.target.llvm_target.contains("-apple-macosx") {
if let Some(new_archive) = try_extract_macho_fat_archive(&self.sess, &archive)? {
archive = new_archive
}
}
let archive_ro = match ArchiveRO::open(&archive) {
Ok(ar) => ar,
Err(e) => return Err(io::Error::new(io::ErrorKind::Other, e)),
};

@@ -67,7 +128,7 @@ impl<'a> ArchiveBuilder<'a> for LlvmArchiveBuilder<'a> {
return Ok(());
}
self.additions.push(Addition::Archive {
path: archive.to_path_buf(),
path: archive,
archive: archive_ro,
skip: Box::new(skip),
});
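For readers unfamiliar with universal ("fat") Mach-O archives: the new code probes the archive header and, when the archive is fat, extracts the slice matching the target architecture into a temporary file before handing it to ArchiveRO. A standalone sketch of that probing step, using only the object-crate calls the diff itself relies on (FatHeader::parse, parse_arch32/parse_arch64, FatArch::architecture/data); the path and the target architecture below are placeholders, and it assumes the object crate with Mach-O read support as a dependency:

    use object::read::macho::FatArch;
    use std::fs;

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        let data = fs::read("libfoo.a")?; // placeholder path
        let target = object::Architecture::Aarch64; // placeholder target

        // Pick the slice for `target` if the file is a fat Mach-O archive.
        let slice = match object::macho::FatHeader::parse(&*data) {
            Ok(h) if h.magic.get(object::endian::BigEndian) == object::macho::FAT_MAGIC => {
                object::macho::FatHeader::parse_arch32(&*data)?
                    .iter()
                    .find(|a| a.architecture() == target)
                    .map(|a| a.data(&*data))
                    .transpose()?
            }
            Ok(h) if h.magic.get(object::endian::BigEndian) == object::macho::FAT_MAGIC_64 => {
                object::macho::FatHeader::parse_arch64(&*data)?
                    .iter()
                    .find(|a| a.architecture() == target)
                    .map(|a| a.data(&*data))
                    .transpose()?
            }
            // Not a fat header at all: a thin archive, nothing to extract.
            _ => None,
        };

        println!("found slice: {} bytes", slice.map_or(0, |s| s.len()));
        Ok(())
    }

The compiler version additionally mmaps the archive and writes the selected slice to a kept tempfile so the existing ArchiveRO path can consume it unchanged.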
@@ -1,8 +1,6 @@
use crate::back::write::{
self, save_temp_bitcode, to_llvm_opt_settings, with_llvm_pmb, DiagnosticHandlers,
};
use crate::llvm::{self, build_string, False, True};
use crate::{llvm_util, LlvmCodegenBackend, ModuleLlvm};
use crate::back::write::{self, save_temp_bitcode, DiagnosticHandlers};
use crate::llvm::{self, build_string};
use crate::{LlvmCodegenBackend, ModuleLlvm};
use object::read::archive::ArchiveFile;
use rustc_codegen_ssa::back::lto::{LtoModuleCodegen, SerializedModule, ThinModule, ThinShared};
use rustc_codegen_ssa::back::symbol_export;

@@ -575,7 +573,7 @@ pub(crate) fn run_pass_manager(
module: &mut ModuleCodegen<ModuleLlvm>,
thin: bool,
) -> Result<(), FatalError> {
let _timer = cgcx.prof.extra_verbose_generic_activity("LLVM_lto_optimize", &*module.name);
let _timer = cgcx.prof.verbose_generic_activity_with_arg("LLVM_lto_optimize", &*module.name);
let config = cgcx.config(module.kind);

// Now we have one massive module inside of llmod. Time to run the

@@ -597,61 +595,9 @@ pub(crate) fn run_pass_manager(
1,
);
}
if llvm_util::should_use_new_llvm_pass_manager(
&config.new_llvm_pass_manager,
&cgcx.target_arch,
) {
let opt_stage = if thin { llvm::OptStage::ThinLTO } else { llvm::OptStage::FatLTO };
let opt_level = config.opt_level.unwrap_or(config::OptLevel::No);
write::optimize_with_new_llvm_pass_manager(
cgcx,
diag_handler,
module,
config,
opt_level,
opt_stage,
)?;
debug!("lto done");
return Ok(());
}

let pm = llvm::LLVMCreatePassManager();
llvm::LLVMAddAnalysisPasses(module.module_llvm.tm, pm);

if config.verify_llvm_ir {
let pass = llvm::LLVMRustFindAndCreatePass("verify\0".as_ptr().cast());
llvm::LLVMRustAddPass(pm, pass.unwrap());
}

let opt_level = config
.opt_level
.map(|x| to_llvm_opt_settings(x).0)
.unwrap_or(llvm::CodeGenOptLevel::None);
with_llvm_pmb(module.module_llvm.llmod(), config, opt_level, false, &mut |b| {
if thin {
llvm::LLVMRustPassManagerBuilderPopulateThinLTOPassManager(b, pm);
} else {
llvm::LLVMRustPassManagerBuilderPopulateLTOPassManager(
b, pm, /* Internalize = */ False, /* RunInliner = */ True,
);
}
});

// We always generate bitcode through ThinLTOBuffers,
// which do not support anonymous globals
if config.bitcode_needed() {
let pass = llvm::LLVMRustFindAndCreatePass("name-anon-globals\0".as_ptr().cast());
llvm::LLVMRustAddPass(pm, pass.unwrap());
}

if config.verify_llvm_ir {
let pass = llvm::LLVMRustFindAndCreatePass("verify\0".as_ptr().cast());
llvm::LLVMRustAddPass(pm, pass.unwrap());
}

llvm::LLVMRunPassManager(pm, module.module_llvm.llmod());

llvm::LLVMDisposePassManager(pm);
let opt_stage = if thin { llvm::OptStage::ThinLTO } else { llvm::OptStage::FatLTO };
let opt_level = config.opt_level.unwrap_or(config::OptLevel::No);
write::llvm_optimize(cgcx, diag_handler, module, config, opt_level, opt_stage)?;
}
debug!("lto done");
Ok(())
@@ -21,7 +21,6 @@ use rustc_data_structures::profiling::SelfProfilerRef;
use rustc_data_structures::small_c_str::SmallCStr;
use rustc_errors::{FatalError, Handler, Level};
use rustc_fs_util::{link_or_copy, path_to_c_string};
use rustc_middle::bug;
use rustc_middle::ty::TyCtxt;
use rustc_session::config::{self, Lto, OutputType, Passes, SplitDwarfKind, SwitchWithOptPath};
use rustc_session::Session;
@@ -417,7 +416,7 @@ fn get_instr_profile_output_path(config: &ModuleConfig) -> Option<CString> {
}
}

pub(crate) unsafe fn optimize_with_new_llvm_pass_manager(
pub(crate) unsafe fn llvm_optimize(
cgcx: &CodegenContext<LlvmCodegenBackend>,
diag_handler: &Handler,
module: &ModuleCodegen<ModuleLlvm>,

@@ -465,7 +464,7 @@ pub(crate) unsafe fn optimize_with_new_llvm_pass_manager(
// FIXME: NewPM doesn't provide a facility to pass custom InlineParams.
// We would have to add upstream support for this first, before we can support
// config.inline_threshold and our more aggressive default thresholds.
let result = llvm::LLVMRustOptimizeWithNewPassManager(
let result = llvm::LLVMRustOptimize(
module.module_llvm.llmod(),
&*module.module_llvm.tm,
to_pass_builder_opt_level(opt_level),
@@ -509,18 +508,11 @@ pub(crate) unsafe fn optimize(

let llmod = module.module_llvm.llmod();
let llcx = &*module.module_llvm.llcx;
let tm = &*module.module_llvm.tm;
let _handlers = DiagnosticHandlers::new(cgcx, diag_handler, llcx);

let module_name = module.name.clone();
let module_name = Some(&module_name[..]);

if let Some(false) = config.new_llvm_pass_manager && llvm_util::get_version() >= (15, 0, 0) {
diag_handler.warn(
"ignoring `-Z new-llvm-pass-manager=no`, which is no longer supported with LLVM 15",
);
}

if config.emit_no_opt_bc {
let out = cgcx.output_filenames.temp_path_ext("no-opt.bc", module_name);
let out = path_to_c_string(&out);
@@ -528,191 +520,24 @@ pub(crate) unsafe fn optimize(
}

if let Some(opt_level) = config.opt_level {
if llvm_util::should_use_new_llvm_pass_manager(
&config.new_llvm_pass_manager,
&cgcx.target_arch,
) {
let opt_stage = match cgcx.lto {
Lto::Fat => llvm::OptStage::PreLinkFatLTO,
Lto::Thin | Lto::ThinLocal => llvm::OptStage::PreLinkThinLTO,
_ if cgcx.opts.cg.linker_plugin_lto.enabled() => llvm::OptStage::PreLinkThinLTO,
_ => llvm::OptStage::PreLinkNoLTO,
};
return optimize_with_new_llvm_pass_manager(
cgcx,
diag_handler,
module,
config,
opt_level,
opt_stage,
);
}

if cgcx.prof.llvm_recording_enabled() {
diag_handler
.warn("`-Z self-profile-events = llvm` requires `-Z new-llvm-pass-manager`");
}

// Create the two optimizing pass managers. These mirror what clang
// does, and are by populated by LLVM's default PassManagerBuilder.
// Each manager has a different set of passes, but they also share
// some common passes.
let fpm = llvm::LLVMCreateFunctionPassManagerForModule(llmod);
let mpm = llvm::LLVMCreatePassManager();

{
let find_pass = |pass_name: &str| {
let pass_name = SmallCStr::new(pass_name);
llvm::LLVMRustFindAndCreatePass(pass_name.as_ptr())
};

if config.verify_llvm_ir {
// Verification should run as the very first pass.
llvm::LLVMRustAddPass(fpm, find_pass("verify").unwrap());
}

let mut extra_passes = Vec::new();
let mut have_name_anon_globals_pass = false;

for pass_name in &config.passes {
if pass_name == "lint" {
// Linting should also be performed early, directly on the generated IR.
llvm::LLVMRustAddPass(fpm, find_pass("lint").unwrap());
continue;
}

if let Some(pass) = find_pass(pass_name) {
extra_passes.push(pass);
} else {
diag_handler.warn(&format!("unknown pass `{}`, ignoring", pass_name));
}

if pass_name == "name-anon-globals" {
have_name_anon_globals_pass = true;
}
}

// Instrumentation must be inserted before optimization,
// otherwise LLVM may optimize some functions away which
// breaks llvm-cov.
//
// This mirrors what Clang does in lib/CodeGen/BackendUtil.cpp.
if config.instrument_gcov {
llvm::LLVMRustAddPass(mpm, find_pass("insert-gcov-profiling").unwrap());
}
if config.instrument_coverage {
llvm::LLVMRustAddPass(mpm, find_pass("instrprof").unwrap());
}
if config.debug_info_for_profiling {
llvm::LLVMRustAddPass(mpm, find_pass("add-discriminators").unwrap());
}

add_sanitizer_passes(config, &mut extra_passes);

// Some options cause LLVM bitcode to be emitted, which uses ThinLTOBuffers, so we need
// to make sure we run LLVM's NameAnonGlobals pass when emitting bitcode; otherwise
// we'll get errors in LLVM.
let using_thin_buffers = config.bitcode_needed();
if !config.no_prepopulate_passes {
llvm::LLVMAddAnalysisPasses(tm, fpm);
llvm::LLVMAddAnalysisPasses(tm, mpm);
let opt_level = to_llvm_opt_settings(opt_level).0;
let prepare_for_thin_lto = cgcx.lto == Lto::Thin
|| cgcx.lto == Lto::ThinLocal
|| (cgcx.lto != Lto::Fat && cgcx.opts.cg.linker_plugin_lto.enabled());
with_llvm_pmb(llmod, config, opt_level, prepare_for_thin_lto, &mut |b| {
llvm::LLVMRustAddLastExtensionPasses(
b,
extra_passes.as_ptr(),
extra_passes.len() as size_t,
);
llvm::LLVMRustPassManagerBuilderPopulateFunctionPassManager(b, fpm);
llvm::LLVMRustPassManagerBuilderPopulateModulePassManager(b, mpm);
});

have_name_anon_globals_pass = have_name_anon_globals_pass || prepare_for_thin_lto;
if using_thin_buffers && !prepare_for_thin_lto {
llvm::LLVMRustAddPass(mpm, find_pass("name-anon-globals").unwrap());
have_name_anon_globals_pass = true;
}
} else {
// If we don't use the standard pipeline, directly populate the MPM
// with the extra passes.
for pass in extra_passes {
llvm::LLVMRustAddPass(mpm, pass);
}
}

if using_thin_buffers && !have_name_anon_globals_pass {
// As described above, this will probably cause an error in LLVM
if config.no_prepopulate_passes {
diag_handler.err(
"The current compilation is going to use thin LTO buffers \
without running LLVM's NameAnonGlobals pass. \
This will likely cause errors in LLVM. Consider adding \
-C passes=name-anon-globals to the compiler command line.",
);
} else {
bug!(
"We are using thin LTO buffers without running the NameAnonGlobals pass. \
This will likely cause errors in LLVM and should never happen."
);
}
}
}

diag_handler.abort_if_errors();

// Finally, run the actual optimization passes
{
let _timer = cgcx.prof.extra_verbose_generic_activity(
"LLVM_module_optimize_function_passes",
&*module.name,
);
llvm::LLVMRustRunFunctionPassManager(fpm, llmod);
}
{
let _timer = cgcx.prof.extra_verbose_generic_activity(
"LLVM_module_optimize_module_passes",
&*module.name,
);
llvm::LLVMRunPassManager(mpm, llmod);
}

// Deallocate managers that we're now done with
llvm::LLVMDisposePassManager(fpm);
llvm::LLVMDisposePassManager(mpm);
let opt_stage = match cgcx.lto {
Lto::Fat => llvm::OptStage::PreLinkFatLTO,
Lto::Thin | Lto::ThinLocal => llvm::OptStage::PreLinkThinLTO,
_ if cgcx.opts.cg.linker_plugin_lto.enabled() => llvm::OptStage::PreLinkThinLTO,
_ => llvm::OptStage::PreLinkNoLTO,
};
return llvm_optimize(cgcx, diag_handler, module, config, opt_level, opt_stage);
}
Ok(())
}

unsafe fn add_sanitizer_passes(config: &ModuleConfig, passes: &mut Vec<&'static mut llvm::Pass>) {
if config.sanitizer.contains(SanitizerSet::ADDRESS) {
let recover = config.sanitizer_recover.contains(SanitizerSet::ADDRESS);
passes.push(llvm::LLVMRustCreateAddressSanitizerFunctionPass(recover));
passes.push(llvm::LLVMRustCreateModuleAddressSanitizerPass(recover));
}
if config.sanitizer.contains(SanitizerSet::MEMORY) {
let track_origins = config.sanitizer_memory_track_origins as c_int;
let recover = config.sanitizer_recover.contains(SanitizerSet::MEMORY);
passes.push(llvm::LLVMRustCreateMemorySanitizerPass(track_origins, recover));
}
if config.sanitizer.contains(SanitizerSet::THREAD) {
passes.push(llvm::LLVMRustCreateThreadSanitizerPass());
}
if config.sanitizer.contains(SanitizerSet::HWADDRESS) {
let recover = config.sanitizer_recover.contains(SanitizerSet::HWADDRESS);
passes.push(llvm::LLVMRustCreateHWAddressSanitizerPass(recover));
}
}

pub(crate) fn link(
cgcx: &CodegenContext<LlvmCodegenBackend>,
diag_handler: &Handler,
mut modules: Vec<ModuleCodegen<ModuleLlvm>>,
) -> Result<ModuleCodegen<ModuleLlvm>, FatalError> {
use super::lto::{Linker, ModuleBuffer};
// Sort the modules by name to ensure to ensure deterministic behavior.
// Sort the modules by name to ensure deterministic behavior.
modules.sort_by(|a, b| a.name.cmp(&b.name));
let (first, elements) =
modules.split_first().expect("Bug! modules must contain at least one module.");
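The part of optimize() that survives the legacy-pass-manager removal is essentially the stage selection followed by a single call into llvm_optimize. A self-contained restatement of that stage mapping with stand-in enums, not the real rustc/LLVM types; here `linker_plugin_lto` stands in for cgcx.opts.cg.linker_plugin_lto.enabled():

    enum Lto { No, Thin, ThinLocal, Fat }

    #[derive(Debug)]
    enum OptStage { PreLinkNoLTO, PreLinkThinLTO, PreLinkFatLTO }

    // Mirrors the match kept in optimize(): fat LTO and (local) thin LTO get
    // their own pre-link stages; linker-plugin LTO is treated like thin LTO.
    fn pre_link_stage(lto: Lto, linker_plugin_lto: bool) -> OptStage {
        match lto {
            Lto::Fat => OptStage::PreLinkFatLTO,
            Lto::Thin | Lto::ThinLocal => OptStage::PreLinkThinLTO,
            _ if linker_plugin_lto => OptStage::PreLinkThinLTO,
            _ => OptStage::PreLinkNoLTO,
        }
    }

    fn main() {
        let cases = [
            (Lto::Fat, false),
            (Lto::Thin, false),
            (Lto::ThinLocal, false),
            (Lto::No, true),
            (Lto::No, false),
        ];
        for (lto, plugin) in cases {
            println!("{:?}", pre_link_stage(lto, plugin));
        }
    }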
@@ -1072,72 +897,6 @@ unsafe fn embed_bitcode(
}
}

pub unsafe fn with_llvm_pmb(
llmod: &llvm::Module,
config: &ModuleConfig,
opt_level: llvm::CodeGenOptLevel,
prepare_for_thin_lto: bool,
f: &mut dyn FnMut(&llvm::PassManagerBuilder),
) {
use std::ptr;

// Create the PassManagerBuilder for LLVM. We configure it with
// reasonable defaults and prepare it to actually populate the pass
// manager.
let builder = llvm::LLVMRustPassManagerBuilderCreate();
let opt_size = config.opt_size.map_or(llvm::CodeGenOptSizeNone, |x| to_llvm_opt_settings(x).1);
let inline_threshold = config.inline_threshold;
let pgo_gen_path = get_pgo_gen_path(config);
let pgo_use_path = get_pgo_use_path(config);
let pgo_sample_use_path = get_pgo_sample_use_path(config);

llvm::LLVMRustConfigurePassManagerBuilder(
builder,
opt_level,
config.merge_functions,
config.vectorize_slp,
config.vectorize_loop,
prepare_for_thin_lto,
pgo_gen_path.as_ref().map_or(ptr::null(), |s| s.as_ptr()),
pgo_use_path.as_ref().map_or(ptr::null(), |s| s.as_ptr()),
pgo_sample_use_path.as_ref().map_or(ptr::null(), |s| s.as_ptr()),
opt_size as c_int,
);

llvm::LLVMRustAddBuilderLibraryInfo(builder, llmod, config.no_builtins);

// Here we match what clang does (kinda). For O0 we only inline
// always-inline functions (but don't add lifetime intrinsics), at O1 we
// inline with lifetime intrinsics, and O2+ we add an inliner with a
// thresholds copied from clang.
match (opt_level, opt_size, inline_threshold) {
(.., Some(t)) => {
llvm::LLVMRustPassManagerBuilderUseInlinerWithThreshold(builder, t);
}
(llvm::CodeGenOptLevel::Aggressive, ..) => {
llvm::LLVMRustPassManagerBuilderUseInlinerWithThreshold(builder, 275);
}
(_, llvm::CodeGenOptSizeDefault, _) => {
llvm::LLVMRustPassManagerBuilderUseInlinerWithThreshold(builder, 75);
}
(_, llvm::CodeGenOptSizeAggressive, _) => {
llvm::LLVMRustPassManagerBuilderUseInlinerWithThreshold(builder, 25);
}
(llvm::CodeGenOptLevel::None, ..) => {
llvm::LLVMRustAddAlwaysInlinePass(builder, config.emit_lifetime_markers);
}
(llvm::CodeGenOptLevel::Less, ..) => {
llvm::LLVMRustAddAlwaysInlinePass(builder, config.emit_lifetime_markers);
}
(llvm::CodeGenOptLevel::Default, ..) => {
llvm::LLVMRustPassManagerBuilderUseInlinerWithThreshold(builder, 225);
}
}

f(builder);
llvm::LLVMRustPassManagerBuilderDispose(builder);
}

// Create a `__imp_<symbol> = &symbol` global for every public static `symbol`.
// This is required to satisfy `dllimport` references to static data in .rlibs
// when using MSVC linker. We do this only for data, as linker can fix up
@@ -19,6 +19,8 @@ use crate::context::CodegenCx;
use crate::llvm;
use crate::value::Value;

use cstr::cstr;

use rustc_codegen_ssa::base::maybe_create_entry_wrapper;
use rustc_codegen_ssa::mono_item::MonoItemExt;
use rustc_codegen_ssa::traits::*;

@@ -107,11 +109,14 @@ pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol) -> (ModuleCodegen
}

// Create the llvm.used and llvm.compiler.used variables.
if !cx.used_statics().borrow().is_empty() {
cx.create_used_variable()
if !cx.used_statics.borrow().is_empty() {
cx.create_used_variable_impl(cstr!("llvm.used"), &*cx.used_statics.borrow());
}
if !cx.compiler_used_statics().borrow().is_empty() {
cx.create_compiler_used_variable()
if !cx.compiler_used_statics.borrow().is_empty() {
cx.create_used_variable_impl(
cstr!("llvm.compiler.used"),
&*cx.compiler_used_statics.borrow(),
);
}

// Run replace-all-uses-with for statics that need it. This must
@@ -1,3 +1,4 @@
use crate::abi::FnAbiLlvmExt;
use crate::attributes;
use crate::common::Funclet;
use crate::context::CodegenCx;

@@ -214,6 +215,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
fn invoke(
&mut self,
llty: &'ll Type,
fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
llfn: &'ll Value,
args: &[&'ll Value],
then: &'ll BasicBlock,

@@ -226,7 +228,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
let bundle = funclet.map(|funclet| funclet.bundle());
let bundle = bundle.as_ref().map(|b| &*b.raw);

unsafe {
let invoke = unsafe {
llvm::LLVMRustBuildInvoke(
self.llbuilder,
llty,

@@ -238,7 +240,11 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
bundle,
UNNAMED,
)
};
if let Some(fn_abi) = fn_abi {
fn_abi.apply_attrs_callsite(self, invoke);
}
invoke
}

fn unreachable(&mut self) {

@@ -405,20 +411,17 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
fn alloca(&mut self, ty: &'ll Type, align: Align) -> &'ll Value {
let mut bx = Builder::with_cx(self.cx);
bx.position_at_start(unsafe { llvm::LLVMGetFirstBasicBlock(self.llfn()) });
bx.dynamic_alloca(ty, align)
}

fn dynamic_alloca(&mut self, ty: &'ll Type, align: Align) -> &'ll Value {
unsafe {
let alloca = llvm::LLVMBuildAlloca(self.llbuilder, ty, UNNAMED);
let alloca = llvm::LLVMBuildAlloca(bx.llbuilder, ty, UNNAMED);
llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
alloca
}
}

fn array_alloca(&mut self, ty: &'ll Type, len: &'ll Value, align: Align) -> &'ll Value {
fn byte_array_alloca(&mut self, len: &'ll Value, align: Align) -> &'ll Value {
unsafe {
let alloca = llvm::LLVMBuildArrayAlloca(self.llbuilder, ty, len, UNNAMED);
let alloca =
llvm::LLVMBuildArrayAlloca(self.llbuilder, self.cx().type_i8(), len, UNNAMED);
llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
alloca
}

@@ -1145,6 +1148,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
fn call(
&mut self,
llty: &'ll Type,
fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
llfn: &'ll Value,
args: &[&'ll Value],
funclet: Option<&Funclet<'ll>>,

@@ -1155,7 +1159,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
let bundle = funclet.map(|funclet| funclet.bundle());
let bundle = bundle.as_ref().map(|b| &*b.raw);

unsafe {
let call = unsafe {
llvm::LLVMRustBuildCall(
self.llbuilder,
llty,

@@ -1164,7 +1168,11 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
args.len() as c_uint,
bundle,
)
};
if let Some(fn_abi) = fn_abi {
fn_abi.apply_attrs_callsite(self, call);
}
call
}

fn zext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {

@@ -1397,7 +1405,7 @@ impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {

pub(crate) fn call_intrinsic(&mut self, intrinsic: &str, args: &[&'ll Value]) -> &'ll Value {
let (ty, f) = self.cx.get_intrinsic(intrinsic);
self.call(ty, f, args, None)
self.call(ty, None, f, args, None)
}

fn call_lifetime_intrinsic(&mut self, intrinsic: &str, ptr: &'ll Value, size: Size) {

@@ -1459,7 +1467,7 @@ impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
format!("llvm.{}.sat.i{}.f{}", instr, int_width, float_width)
};
let f = self.declare_cfn(&name, llvm::UnnamedAddr::No, self.type_func(&[src_ty], dest_ty));
self.call(self.type_func(&[src_ty], dest_ty), f, &[val], None)
self.call(self.type_func(&[src_ty], dest_ty), None, f, &[val], None)
}

pub(crate) fn landing_pad(
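One behavioural detail worth spelling out: array_alloca previously took the element type from its caller, while byte_array_alloca always allocates i8 elements, so the dynamic length is now a size in bytes. A trivial stand-in illustration of that contract (placeholder types, not the real LLVM wrappers):

    // Placeholder model of an alloca request; illustration only.
    #[derive(Debug)]
    struct Alloca { elem: &'static str, len: usize, align: usize }

    // Equivalent in spirit to the old array_alloca(type_i8(), len, align):
    // the element type is fixed, callers pass a byte count.
    fn byte_array_alloca(len_bytes: usize, align: usize) -> Alloca {
        Alloca { elem: "i8", len: len_bytes, align }
    }

    fn main() {
        println!("{:?}", byte_array_alloca(24, 8));
    }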
@@ -552,7 +552,7 @@ impl<'ll> StaticMethods for CodegenCx<'ll, '_> {
// `#[used(compiler)]` is explicitly requested. This is to avoid similar breakage
// on other targets, in particular MachO targets have *their* static constructor
// lists broken if `llvm.compiler.used` is emitted rather than llvm.used. However,
// that check happens when assigning the `CodegenFnAttrFlags` in `rustc_typeck`,
// that check happens when assigning the `CodegenFnAttrFlags` in `rustc_hir_analysis`,
// so we don't need to take care of it here.
self.add_compiler_used_global(g);
}
@@ -154,6 +154,11 @@ pub unsafe fn create_module<'ll>(
target_data_layout = target_data_layout.replace("-p10:8:8-p20:8:8", "");
}
}
if llvm_version < (16, 0, 0) {
if sess.target.arch == "s390x" {
target_data_layout = target_data_layout.replace("-v128:64", "");
}
}

// Ensure the data-layout values hardcoded remain the defaults.
if sess.target.is_builtin {
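The hunk above follows the file's existing pattern: when the bundled data-layout string contains a component that the LLVM version actually in use does not yet accept, that component is stripped before the module is created. A standalone restatement of just that fixup (the version gate and the "-v128:64" component mirror the diff; the function and the sample layout string are illustrative only):

    // Strip data-layout components that the loaded LLVM does not understand,
    // per the version gate shown in the diff (s390x, LLVM < 16).
    fn adjust_data_layout(mut layout: String, arch: &str, llvm_version: (u32, u32, u32)) -> String {
        if llvm_version < (16, 0, 0) && arch == "s390x" {
            layout = layout.replace("-v128:64", "");
        }
        layout
    }

    fn main() {
        // Example layout string of the s390x shape; abbreviated for illustration.
        let layout = "E-m:e-i1:8:16-i64:64-f128:64-v128:64-a:8:16-n32:64".to_string();
        println!("{}", adjust_data_layout(layout, "s390x", (15, 0, 7)));
    }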
@@ -453,7 +458,7 @@ impl<'ll, 'tcx> CodegenCx<'ll, 'tcx> {
self.coverage_cx.as_ref()
}

fn create_used_variable_impl(&self, name: &'static CStr, values: &[&'ll Value]) {
pub(crate) fn create_used_variable_impl(&self, name: &'static CStr, values: &[&'ll Value]) {
let section = cstr!("llvm.metadata");
let array = self.const_array(self.type_ptr_to(self.type_i8()), values);

@@ -551,14 +556,6 @@ impl<'ll, 'tcx> MiscMethods<'tcx> for CodegenCx<'ll, 'tcx> {
self.codegen_unit
}

fn used_statics(&self) -> &RefCell<Vec<&'ll Value>> {
&self.used_statics
}

fn compiler_used_statics(&self) -> &RefCell<Vec<&'ll Value>> {
&self.compiler_used_statics
}

fn set_frame_pointer_type(&self, llfn: &'ll Value) {
if let Some(attr) = attributes::frame_pointer_type_attr(self) {
attributes::apply_to_llfn(llfn, llvm::AttributePlace::Function, &[attr]);

@@ -572,17 +569,6 @@ impl<'ll, 'tcx> MiscMethods<'tcx> for CodegenCx<'ll, 'tcx> {
attributes::apply_to_llfn(llfn, llvm::AttributePlace::Function, &attrs);
}

fn create_used_variable(&self) {
self.create_used_variable_impl(cstr!("llvm.used"), &*self.used_statics.borrow());
}

fn create_compiler_used_variable(&self) {
self.create_used_variable_impl(
cstr!("llvm.compiler.used"),
&*self.compiler_used_statics.borrow(),
);
}

fn declare_c_main(&self, fn_type: Self::Type) -> Option<Self::Function> {
if self.get_declared_value("main").is_none() {
Some(self.declare_cfn("main", llvm::UnnamedAddr::Global, fn_type))
@@ -129,7 +129,7 @@ impl CoverageMapGenerator {
// LLVM Coverage Mapping Format version 6 (zero-based encoded as 5)
// requires setting the first filename to the compilation directory.
// Since rustc generates coverage maps with relative paths, the
// compilation directory can be combined with the the relative paths
// compilation directory can be combined with the relative paths
// to get absolute paths, if needed.
let working_dir = tcx
.sess
@@ -32,6 +32,7 @@ fn declare_raw_fn<'ll>(
name: &str,
callconv: llvm::CallConv,
unnamed: llvm::UnnamedAddr,
visibility: llvm::Visibility,
ty: &'ll Type,
) -> &'ll Value {
debug!("declare_raw_fn(name={:?}, ty={:?})", name, ty);

@@ -41,6 +42,7 @@ fn declare_raw_fn<'ll>(

llvm::SetFunctionCallConv(llfn, callconv);
llvm::SetUnnamedAddress(llfn, unnamed);
llvm::set_visibility(llfn, visibility);

let mut attrs = SmallVec::<[_; 4]>::new();

@@ -78,7 +80,14 @@ impl<'ll, 'tcx> CodegenCx<'ll, 'tcx> {
unnamed: llvm::UnnamedAddr,
fn_type: &'ll Type,
) -> &'ll Value {
declare_raw_fn(self, name, llvm::CCallConv, unnamed, fn_type)
// Declare C ABI functions with the visibility used by C by default.
let visibility = if self.tcx.sess.target.default_hidden_visibility {
llvm::Visibility::Hidden
} else {
llvm::Visibility::Default
};

declare_raw_fn(self, name, llvm::CCallConv, unnamed, visibility, fn_type)
}

/// Declare a Rust function.

@@ -95,6 +104,7 @@ impl<'ll, 'tcx> CodegenCx<'ll, 'tcx> {
name,
fn_abi.llvm_cconv(),
llvm::UnnamedAddr::Global,
llvm::Visibility::Default,
fn_abi.llvm_type(self),
);
fn_abi.apply_attrs_llfn(self, llfn);
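With the declare_cfn change, C ABI declarations pick up the target's default symbol visibility instead of always using Default, while the Rust-ABI path (last hunk above) keeps Visibility::Default. A minimal restatement of that decision with placeholder names; `target_default_hidden` stands in for sess.target.default_hidden_visibility:

    #[derive(Debug)]
    enum Visibility { Default, Hidden }

    // Mirrors the new declare_cfn logic: C declarations follow the visibility
    // C code would get on this target.
    fn c_decl_visibility(target_default_hidden: bool) -> Visibility {
        if target_default_hidden { Visibility::Hidden } else { Visibility::Default }
    }

    fn main() {
        println!("{:?}", c_decl_visibility(true));  // Hidden
        println!("{:?}", c_decl_visibility(false)); // Default
    }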