Auto merge of #127278 - matthiaskrgr:rollup-fjexkdr, r=matthiaskrgr
Rollup of 8 pull requests

Successful merges:

 - #126803 (Change `asm-comments` to `verbose-asm`, always emit user comments)
 - #127050 (Make mtime of reproducible tarballs dependent on git commit)
 - #127145 (Add `as_lang_item` to `LanguageItems`, new trait solver)
 - #127202 (Remove global error count checks from typeck)
 - #127233 (Some parser cleanups)
 - #127248 (Add parse fail test using safe trait/impl trait)
 - #127264 (Small `run-make-support` API improvements)
 - #127270 (bootstrap: pass correct struct size to winapi)

r? `@ghost`
`@rustbot` modify labels: rollup
commit 1cfd47fe0b

44 changed files with 558 additions and 273 deletions
@@ -204,12 +204,14 @@ impl Attribute {
     pub fn tokens(&self) -> TokenStream {
         match &self.kind {
-            AttrKind::Normal(normal) => normal
-                .tokens
-                .as_ref()
-                .unwrap_or_else(|| panic!("attribute is missing tokens: {self:?}"))
-                .to_attr_token_stream()
-                .to_tokenstream(),
+            AttrKind::Normal(normal) => TokenStream::new(
+                normal
+                    .tokens
+                    .as_ref()
+                    .unwrap_or_else(|| panic!("attribute is missing tokens: {self:?}"))
+                    .to_attr_token_stream()
+                    .to_token_trees(),
+            ),
             &AttrKind::DocComment(comment_kind, data) => TokenStream::token_alone(
                 token::DocComment(comment_kind, self.style, data),
                 self.span,
 
@@ -23,7 +23,6 @@ use rustc_data_structures::sync::{self, Lrc};
 use rustc_macros::{Decodable, Encodable, HashStable_Generic};
 use rustc_serialize::{Decodable, Encodable};
 use rustc_span::{sym, Span, SpanDecoder, SpanEncoder, Symbol, DUMMY_SP};
-use smallvec::{smallvec, SmallVec};
 
 use std::borrow::Cow;
 use std::{cmp, fmt, iter};
@@ -180,27 +179,25 @@ impl AttrTokenStream {
         AttrTokenStream(Lrc::new(tokens))
     }
 
-    /// Converts this `AttrTokenStream` to a plain `TokenStream`.
+    /// Converts this `AttrTokenStream` to a plain `Vec<TokenTree>`.
     /// During conversion, `AttrTokenTree::Attributes` get 'flattened'
     /// back to a `TokenStream` of the form `outer_attr attr_target`.
     /// If there are inner attributes, they are inserted into the proper
     /// place in the attribute target tokens.
-    pub fn to_tokenstream(&self) -> TokenStream {
-        let trees: Vec<_> = self
-            .0
-            .iter()
-            .flat_map(|tree| match &tree {
+    pub fn to_token_trees(&self) -> Vec<TokenTree> {
+        let mut res = Vec::with_capacity(self.0.len());
+        for tree in self.0.iter() {
+            match tree {
                 AttrTokenTree::Token(inner, spacing) => {
-                    smallvec![TokenTree::Token(inner.clone(), *spacing)].into_iter()
+                    res.push(TokenTree::Token(inner.clone(), *spacing));
                 }
                 AttrTokenTree::Delimited(span, spacing, delim, stream) => {
-                    smallvec![TokenTree::Delimited(
+                    res.push(TokenTree::Delimited(
                         *span,
                         *spacing,
                         *delim,
-                        stream.to_tokenstream()
-                    ),]
-                    .into_iter()
+                        TokenStream::new(stream.to_token_trees()),
+                    ))
                 }
                 AttrTokenTree::Attributes(data) => {
                     let idx = data
@@ -208,14 +205,7 @@ impl AttrTokenStream {
                         .partition_point(|attr| matches!(attr.style, crate::AttrStyle::Outer));
                     let (outer_attrs, inner_attrs) = data.attrs.split_at(idx);
 
-                    let mut target_tokens: Vec<_> = data
-                        .tokens
-                        .to_attr_token_stream()
-                        .to_tokenstream()
-                        .0
-                        .iter()
-                        .cloned()
-                        .collect();
+                    let mut target_tokens = data.tokens.to_attr_token_stream().to_token_trees();
                     if !inner_attrs.is_empty() {
                         let mut found = false;
                         // Check the last two trees (to account for a trailing semi)
@@ -251,17 +241,14 @@ impl AttrTokenStream {
                             "Failed to find trailing delimited group in: {target_tokens:?}"
                         );
                     }
-                    let mut flat: SmallVec<[_; 1]> =
-                        SmallVec::with_capacity(target_tokens.len() + outer_attrs.len());
                     for attr in outer_attrs {
-                        flat.extend(attr.tokens().0.iter().cloned());
+                        res.extend(attr.tokens().0.iter().cloned());
                    }
-                    flat.extend(target_tokens);
-                    flat.into_iter()
+                    res.extend(target_tokens);
                 }
-            })
-            .collect();
-        TokenStream::new(trees)
+            }
+        }
+        res
     }
 }
 
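A note on the shape of this refactor (commentary, not part of the diff): returning `Vec<TokenTree>` instead of a finished `TokenStream` lets the `Attributes` arm push outer-attribute tokens and target tokens into one shared `res` buffer, where the old code built a `SmallVec` per tree and flattened — which is also why the `smallvec` import above became unused. A minimal self-contained sketch of the pattern, using stand-in types rather than rustc's own:

    // Stand-ins for rustc's TokenTree/TokenStream; illustrative only.
    #[derive(Clone, Debug)]
    struct Tree(u32);
    #[derive(Debug)]
    struct Stream(Vec<Tree>);

    // Old shape: each branch builds its own small collection, and the
    // caller flattens them all into a fresh stream.
    fn to_stream_old(groups: &[Vec<Tree>]) -> Stream {
        Stream(groups.iter().flat_map(|g| g.iter().cloned()).collect())
    }

    // New shape: one pre-sized buffer that every branch pushes into; the
    // caller wraps it exactly once (cf. `TokenStream::new(...)` above).
    fn to_trees_new(groups: &[Vec<Tree>]) -> Vec<Tree> {
        let mut res = Vec::with_capacity(groups.len());
        for g in groups {
            res.extend(g.iter().cloned());
        }
        res
    }

    fn main() {
        let groups = vec![vec![Tree(1)], vec![Tree(2), Tree(3)]];
        println!("{:?}", to_stream_old(&groups));
        println!("{:?}", Stream(to_trees_new(&groups)));
    }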
@@ -409,8 +396,8 @@ impl PartialEq<TokenStream> for TokenStream {
 }
 
 impl TokenStream {
-    pub fn new(streams: Vec<TokenTree>) -> TokenStream {
-        TokenStream(Lrc::new(streams))
+    pub fn new(tts: Vec<TokenTree>) -> TokenStream {
+        TokenStream(Lrc::new(tts))
     }
 
     pub fn is_empty(&self) -> bool {
@@ -461,7 +448,7 @@ impl TokenStream {
             AttributesData { attrs: attrs.iter().cloned().collect(), tokens: tokens.clone() };
         AttrTokenStream::new(vec![AttrTokenTree::Attributes(attr_data)])
     };
-    attr_stream.to_tokenstream()
+    TokenStream::new(attr_stream.to_token_trees())
 }
 
 pub fn from_nonterminal_ast(nt: &Nonterminal) -> TokenStream {
@@ -38,16 +38,14 @@ pub(crate) fn cfg_eval(
     lint_node_id: NodeId,
 ) -> Annotatable {
     let features = Some(features);
-    CfgEval { cfg: &mut StripUnconfigured { sess, features, config_tokens: true, lint_node_id } }
+    CfgEval(StripUnconfigured { sess, features, config_tokens: true, lint_node_id })
         .configure_annotatable(annotatable)
         // Since the item itself has already been configured by the `InvocationCollector`,
         // we know that fold result vector will contain exactly one element.
         .unwrap()
 }
 
-struct CfgEval<'a, 'b> {
-    cfg: &'a mut StripUnconfigured<'b>,
-}
+struct CfgEval<'a>(StripUnconfigured<'a>);
 
 fn flat_map_annotatable(
     vis: &mut impl MutVisitor,
@@ -125,9 +123,9 @@ fn has_cfg_or_cfg_attr(annotatable: &Annotatable) -> bool {
     res.is_break()
 }
 
-impl CfgEval<'_, '_> {
+impl CfgEval<'_> {
     fn configure<T: HasAttrs + HasTokens>(&mut self, node: T) -> Option<T> {
-        self.cfg.configure(node)
+        self.0.configure(node)
     }
 
     fn configure_annotatable(&mut self, mut annotatable: Annotatable) -> Option<Annotatable> {
@@ -196,7 +194,7 @@ impl CfgEval<'_, '_> {
         // Re-parse the tokens, setting the `capture_cfg` flag to save extra information
         // to the captured `AttrTokenStream` (specifically, we capture
         // `AttrTokenTree::AttributesData` for all occurrences of `#[cfg]` and `#[cfg_attr]`)
-        let mut parser = Parser::new(&self.cfg.sess.psess, orig_tokens, None);
+        let mut parser = Parser::new(&self.0.sess.psess, orig_tokens, None);
         parser.capture_cfg = true;
         match parse_annotatable_with(&mut parser) {
             Ok(a) => annotatable = a,
@@ -212,16 +210,16 @@ impl CfgEval<'_, '_> {
     }
 }
 
-impl MutVisitor for CfgEval<'_, '_> {
+impl MutVisitor for CfgEval<'_> {
     #[instrument(level = "trace", skip(self))]
     fn visit_expr(&mut self, expr: &mut P<ast::Expr>) {
-        self.cfg.configure_expr(expr, false);
+        self.0.configure_expr(expr, false);
         mut_visit::noop_visit_expr(expr, self);
     }
 
     #[instrument(level = "trace", skip(self))]
     fn visit_method_receiver_expr(&mut self, expr: &mut P<ast::Expr>) {
-        self.cfg.configure_expr(expr, true);
+        self.0.configure_expr(expr, true);
         mut_visit::noop_visit_expr(expr, self);
     }
 
@@ -32,7 +32,7 @@ impl OwnedTargetMachine {
         unique_section_names: bool,
         trap_unreachable: bool,
         singletree: bool,
-        asm_comments: bool,
+        verbose_asm: bool,
         emit_stack_size_section: bool,
         relax_elf_relocations: bool,
         use_init_array: bool,
@@ -64,7 +64,7 @@ impl OwnedTargetMachine {
             unique_section_names,
             trap_unreachable,
             singletree,
-            asm_comments,
+            verbose_asm,
             emit_stack_size_section,
             relax_elf_relocations,
             use_init_array,
@@ -214,7 +214,7 @@ pub fn target_machine_factory(
         sess.opts.unstable_opts.trap_unreachable.unwrap_or(sess.target.trap_unreachable);
     let emit_stack_size_section = sess.opts.unstable_opts.emit_stack_sizes;
 
-    let asm_comments = sess.opts.unstable_opts.asm_comments;
+    let verbose_asm = sess.opts.unstable_opts.verbose_asm;
     let relax_elf_relocations =
         sess.opts.unstable_opts.relax_elf_relocations.unwrap_or(sess.target.relax_elf_relocations);
 
@@ -289,7 +289,7 @@ pub fn target_machine_factory(
             funique_section_names,
             trap_unreachable,
             singlethread,
-            asm_comments,
+            verbose_asm,
             emit_stack_size_section,
             relax_elf_relocations,
             use_init_array,
@@ -2185,7 +2185,7 @@ extern "C" {
         UniqueSectionNames: bool,
         TrapUnreachable: bool,
         Singlethread: bool,
-        AsmComments: bool,
+        VerboseAsm: bool,
         EmitStackSizeSection: bool,
         RelaxELFRelocations: bool,
         UseInitArray: bool,
@@ -120,21 +120,21 @@ struct CollectTrackerAndEmitter<'a, 'cx, 'matcher> {
 
 struct BestFailure {
     token: Token,
-    position_in_tokenstream: usize,
+    position_in_tokenstream: u32,
     msg: &'static str,
     remaining_matcher: MatcherLoc,
 }
 
 impl BestFailure {
-    fn is_better_position(&self, position: usize) -> bool {
+    fn is_better_position(&self, position: u32) -> bool {
         position > self.position_in_tokenstream
     }
 }
 
 impl<'a, 'cx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'a, 'cx, 'matcher> {
-    type Failure = (Token, usize, &'static str);
+    type Failure = (Token, u32, &'static str);
 
-    fn build_failure(tok: Token, position: usize, msg: &'static str) -> Self::Failure {
+    fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure {
         (tok, position, msg)
     }
 
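A side note on this `usize` → `u32` change (commentary, not diff content): these positions count `bump` calls into a token stream, which comfortably fit in 32 bits, and the narrower field lets hot types pack tighter — see the size assertions later in this diff (`LazyAttrTokenStreamImpl` 104 → 96 bytes, `Parser` 264 → 256). A minimal illustration of the layout effect on a 64-bit target:

    fn main() {
        // Two pointer-sized positions vs. two 32-bit positions:
        println!("(usize, usize): {} bytes", std::mem::size_of::<(usize, usize)>()); // 16 on 64-bit
        println!("(u32, u32):     {} bytes", std::mem::size_of::<(u32, u32)>());     // 8
    }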
@@ -211,9 +211,9 @@ impl<'matcher> FailureForwarder<'matcher> {
 }
 
 impl<'matcher> Tracker<'matcher> for FailureForwarder<'matcher> {
-    type Failure = (Token, usize, &'static str);
+    type Failure = (Token, u32, &'static str);
 
-    fn build_failure(tok: Token, position: usize, msg: &'static str) -> Self::Failure {
+    fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure {
         (tok, position, msg)
     }
 
@@ -452,7 +452,7 @@ impl TtParser {
         &mut self,
         matcher: &'matcher [MatcherLoc],
         token: &Token,
-        approx_position: usize,
+        approx_position: u32,
         track: &mut T,
     ) -> Option<NamedParseResult<T::Failure>> {
         // Matcher positions that would be valid if the macro invocation was over now. Only
@@ -153,7 +153,7 @@ pub(super) trait Tracker<'matcher> {
     /// Arm failed to match. If the token is `token::Eof`, it indicates an unexpected
     /// end of macro invocation. Otherwise, it indicates that no rules expected the given token.
     /// The usize is the approximate position of the token in the input token stream.
-    fn build_failure(tok: Token, position: usize, msg: &'static str) -> Self::Failure;
+    fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure;
 
     /// This is called before trying to match next MatcherLoc on the current token.
     fn before_match_loc(&mut self, _parser: &TtParser, _matcher: &'matcher MatcherLoc) {}
@@ -182,7 +182,7 @@ pub(super) struct NoopTracker;
 impl<'matcher> Tracker<'matcher> for NoopTracker {
     type Failure = ();
 
-    fn build_failure(_tok: Token, _position: usize, _msg: &'static str) -> Self::Failure {}
+    fn build_failure(_tok: Token, _position: u32, _msg: &'static str) -> Self::Failure {}
 
     fn description() -> &'static str {
         "none"
@@ -11,6 +11,7 @@ use crate::def_id::DefId;
 use crate::{MethodKind, Target};
 
 use rustc_ast as ast;
+use rustc_data_structures::fx::FxIndexMap;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_macros::{Decodable, Encodable, HashStable_Generic};
 use rustc_span::symbol::{kw, sym, Symbol};
@@ -23,6 +24,7 @@ pub struct LanguageItems {
     /// Mappings from lang items to their possibly found [`DefId`]s.
     /// The index corresponds to the order in [`LangItem`].
     items: [Option<DefId>; std::mem::variant_count::<LangItem>()],
+    reverse_items: FxIndexMap<DefId, LangItem>,
     /// Lang items that were not found during collection.
     pub missing: Vec<LangItem>,
 }
@@ -30,7 +32,11 @@ pub struct LanguageItems {
 impl LanguageItems {
     /// Construct an empty collection of lang items and no missing ones.
     pub fn new() -> Self {
-        Self { items: [None; std::mem::variant_count::<LangItem>()], missing: Vec::new() }
+        Self {
+            items: [None; std::mem::variant_count::<LangItem>()],
+            reverse_items: FxIndexMap::default(),
+            missing: Vec::new(),
+        }
     }
 
     pub fn get(&self, item: LangItem) -> Option<DefId> {
@@ -39,6 +45,11 @@ impl LanguageItems {
 
     pub fn set(&mut self, item: LangItem, def_id: DefId) {
         self.items[item as usize] = Some(def_id);
+        self.reverse_items.insert(def_id, item);
+    }
+
+    pub fn from_def_id(&self, def_id: DefId) -> Option<LangItem> {
+        self.reverse_items.get(&def_id).copied()
     }
 
     pub fn iter(&self) -> impl Iterator<Item = (LangItem, DefId)> + '_ {
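Aside (not from the patch): `reverse_items` turns `LanguageItems` into a bidirectional map — the `items` array answers "which `DefId` defines this lang item?" by index, while the new `FxIndexMap` answers the reverse `DefId` → `LangItem` query that `as_lang_item` needs, without scanning the array. A self-contained sketch of the same pattern with std types (`HashMap` standing in for `FxIndexMap`, toy types for `LangItem`/`DefId`):

    use std::collections::HashMap;

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    enum Item { Sized, Copy }

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct DefId(u32);

    #[derive(Default)]
    struct Items {
        forward: HashMap<Item, DefId>, // rustc uses a dense array indexed by the enum
        reverse: HashMap<DefId, Item>, // rustc uses FxIndexMap<DefId, LangItem>
    }

    impl Items {
        // Mirror of `LanguageItems::set`: keep both directions in sync.
        fn set(&mut self, item: Item, def_id: DefId) {
            self.forward.insert(item, def_id);
            self.reverse.insert(def_id, item);
        }
        // Mirror of the new `from_def_id`.
        fn from_def_id(&self, def_id: DefId) -> Option<Item> {
            self.reverse.get(&def_id).copied()
        }
    }

    fn main() {
        let mut items = Items::default();
        items.set(Item::Sized, DefId(7));
        items.set(Item::Copy, DefId(8));
        assert_eq!(items.from_def_id(DefId(8)), Some(Item::Copy));
        assert_eq!(items.from_def_id(DefId(9)), None);
    }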
@@ -708,7 +708,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             // else an error would have been flagged by the
             // `loops` pass for using break with an expression
             // where you are not supposed to.
-            assert!(expr_opt.is_none() || self.dcx().has_errors().is_some());
+            assert!(expr_opt.is_none() || self.tainted_by_errors().is_some());
         }
 
         // If we encountered a `break`, then (no surprise) it may be possible to break from the
@@ -734,9 +734,7 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx
                 // struct; however, when EUV is run during typeck, it
                 // may not. This will generate an error earlier in typeck,
                 // so we can just ignore it.
-                if self.cx.tcx().dcx().has_errors().is_none() {
-                    span_bug!(with_expr.span, "with expression doesn't evaluate to a struct");
-                }
+                span_bug!(with_expr.span, "with expression doesn't evaluate to a struct");
             }
         }
 
@@ -1652,7 +1652,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 
         self.warn_if_unreachable(stmt.hir_id, stmt.span, "statement");
 
-        // Hide the outer diverging and `has_errors` flags.
+        // Hide the outer diverging flags.
        let old_diverges = self.diverges.replace(Diverges::Maybe);
 
         match stmt.kind {
@@ -510,9 +510,12 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> {
                 .report_mismatched_types(&cause, method_self_ty, self_ty, terr)
                 .emit();
         } else {
-            error!("{self_ty} was a subtype of {method_self_ty} but now is not?");
-            // This must already have errored elsewhere.
-            self.dcx().has_errors().unwrap();
+            // This has/will have errored in wfcheck, which we cannot depend on from here, as typeck on functions
+            // may run before wfcheck if the function is used in const eval.
+            self.dcx().span_delayed_bug(
+                cause.span(),
+                format!("{self_ty} was a subtype of {method_self_ty} but now is not?"),
+            );
         }
     }
 }
@@ -219,28 +219,9 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
     fn fix_index_builtin_expr(&mut self, e: &hir::Expr<'_>) {
         if let hir::ExprKind::Index(ref base, ref index, _) = e.kind {
             // All valid indexing looks like this; might encounter non-valid indexes at this point.
-            let base_ty = self.typeck_results.expr_ty_adjusted_opt(base);
-            if base_ty.is_none() {
-                // When encountering `return [0][0]` outside of a `fn` body we can encounter a base
-                // that isn't in the type table. We assume more relevant errors have already been
-                // emitted. (#64638)
-                assert!(self.tcx().dcx().has_errors().is_some(), "bad base: `{base:?}`");
-            }
-            if let Some(base_ty) = base_ty
-                && let ty::Ref(_, base_ty_inner, _) = *base_ty.kind()
-            {
-                let index_ty =
-                    self.typeck_results.expr_ty_adjusted_opt(index).unwrap_or_else(|| {
-                        // When encountering `return [0][0]` outside of a `fn` body we would attempt
-                        // to access an nonexistent index. We assume that more relevant errors will
-                        // already have been emitted, so we only gate on this with an ICE if no
-                        // error has been emitted. (#64638)
-                        Ty::new_error_with_message(
-                            self.fcx.tcx,
-                            e.span,
-                            format!("bad index {index:?} for base: `{base:?}`"),
-                        )
-                    });
+            let base_ty = self.typeck_results.expr_ty_adjusted(base);
+            if let ty::Ref(_, base_ty_inner, _) = *base_ty.kind() {
+                let index_ty = self.typeck_results.expr_ty_adjusted(index);
                 if self.is_builtin_index(e, base_ty_inner, index_ty) {
                     // Remove the method call record
                     self.typeck_results.type_dependent_defs_mut().remove(e.hir_id);
@@ -757,7 +757,6 @@ fn test_unstable_options_tracking_hash() {
     // tidy-alphabetical-start
     tracked!(allow_features, Some(vec![String::from("lang_items")]));
     tracked!(always_encode_mir, true);
-    tracked!(asm_comments, true);
     tracked!(assume_incomplete_release, true);
     tracked!(binary_dep_depinfo, true);
     tracked!(box_noalias, false);
@@ -862,6 +861,7 @@ fn test_unstable_options_tracking_hash() {
     tracked!(uninit_const_chunk_threshold, 123);
     tracked!(unleash_the_miri_inside_of_you, true);
     tracked!(use_ctors_section, Some(true));
+    tracked!(verbose_asm, true);
     tracked!(verify_llvm_ir, true);
     tracked!(virtual_function_elimination, true);
     tracked!(wasi_exec_model, Some(WasiExecModel::Reactor));
@@ -407,7 +407,7 @@ extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine(
     const char *ABIStr, LLVMRustCodeModel RustCM, LLVMRustRelocModel RustReloc,
     LLVMRustCodeGenOptLevel RustOptLevel, bool UseSoftFloat,
     bool FunctionSections, bool DataSections, bool UniqueSectionNames,
-    bool TrapUnreachable, bool Singlethread, bool AsmComments,
+    bool TrapUnreachable, bool Singlethread, bool VerboseAsm,
     bool EmitStackSizeSection, bool RelaxELFRelocations, bool UseInitArray,
     const char *SplitDwarfFile, const char *OutputObjFile,
     const char *DebugInfoCompression, bool UseEmulatedTls,
@@ -435,8 +435,9 @@ extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine(
   Options.DataSections = DataSections;
   Options.FunctionSections = FunctionSections;
   Options.UniqueSectionNames = UniqueSectionNames;
-  Options.MCOptions.AsmVerbose = AsmComments;
-  Options.MCOptions.PreserveAsmComments = AsmComments;
+  Options.MCOptions.AsmVerbose = VerboseAsm;
+  // Always preserve comments that were written by the user
+  Options.MCOptions.PreserveAsmComments = true;
   Options.MCOptions.ABIName = ABIStr;
   if (SplitDwarfFile) {
     Options.MCOptions.SplitDwarfFile = SplitDwarfFile;
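The behavioral point of this hunk, matching the PR title "always emit user comments": `PreserveAsmComments` is now unconditionally `true`, so comments the user writes survive in the emitted assembly regardless of flags, while `AsmVerbose` — LLVM's own descriptive annotations — is what the renamed `-Z verbose-asm` option controls. A hedged usage sketch; the exact comment text in the output varies by target and LLVM version:

    // Build with, e.g.: rustc --emit=asm -Z verbose-asm square.rs
    // With -Z verbose-asm, LLVM annotates the generated .s file with
    // descriptive comments; without it, only user-authored comments are
    // preserved (now always, per this patch).
    pub fn square(x: i32) -> i32 {
        x.wrapping_mul(x)
    }

    fn main() {
        println!("{}", square(12));
    }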
@@ -27,6 +27,10 @@ impl<'tcx> TyCtxt<'tcx> {
         self.lang_items().get(lang_item) == Some(def_id)
     }
 
+    pub fn as_lang_item(self, def_id: DefId) -> Option<LangItem> {
+        self.lang_items().from_def_id(def_id)
+    }
+
     /// Given a [`DefId`] of one of the [`Fn`], [`FnMut`] or [`FnOnce`] traits,
     /// returns a corresponding [`ty::ClosureKind`].
     /// For any other [`DefId`] return `None`.
@@ -366,6 +366,10 @@ impl<'tcx> Interner for TyCtxt<'tcx> {
         self.is_lang_item(def_id, trait_lang_item_to_lang_item(lang_item))
     }
 
+    fn as_lang_item(self, def_id: DefId) -> Option<TraitSolverLangItem> {
+        lang_item_to_trait_lang_item(self.lang_items().from_def_id(def_id)?)
+    }
+
     fn associated_type_def_ids(self, def_id: DefId) -> impl IntoIterator<Item = DefId> {
         self.associated_items(def_id)
             .in_definition_order()
@@ -522,14 +526,6 @@ impl<'tcx> Interner for TyCtxt<'tcx> {
         self.trait_def(trait_def_id).implement_via_object
     }
 
-    fn fn_trait_kind_from_def_id(self, trait_def_id: DefId) -> Option<ty::ClosureKind> {
-        self.fn_trait_kind_from_def_id(trait_def_id)
-    }
-
-    fn async_fn_trait_kind_from_def_id(self, trait_def_id: DefId) -> Option<ty::ClosureKind> {
-        self.async_fn_trait_kind_from_def_id(trait_def_id)
-    }
-
     fn supertrait_def_ids(self, trait_def_id: DefId) -> impl IntoIterator<Item = DefId> {
         self.supertrait_def_ids(trait_def_id)
     }
@@ -573,46 +569,69 @@ impl<'tcx> Interner for TyCtxt<'tcx> {
     }
 }
 
-fn trait_lang_item_to_lang_item(lang_item: TraitSolverLangItem) -> LangItem {
-    match lang_item {
-        TraitSolverLangItem::AsyncDestruct => LangItem::AsyncDestruct,
-        TraitSolverLangItem::AsyncFnKindHelper => LangItem::AsyncFnKindHelper,
-        TraitSolverLangItem::AsyncFnKindUpvars => LangItem::AsyncFnKindUpvars,
-        TraitSolverLangItem::AsyncFnOnceOutput => LangItem::AsyncFnOnceOutput,
-        TraitSolverLangItem::AsyncIterator => LangItem::AsyncIterator,
-        TraitSolverLangItem::CallOnceFuture => LangItem::CallOnceFuture,
-        TraitSolverLangItem::CallRefFuture => LangItem::CallRefFuture,
-        TraitSolverLangItem::Clone => LangItem::Clone,
-        TraitSolverLangItem::Copy => LangItem::Copy,
-        TraitSolverLangItem::Coroutine => LangItem::Coroutine,
-        TraitSolverLangItem::CoroutineReturn => LangItem::CoroutineReturn,
-        TraitSolverLangItem::CoroutineYield => LangItem::CoroutineYield,
-        TraitSolverLangItem::Destruct => LangItem::Destruct,
-        TraitSolverLangItem::DiscriminantKind => LangItem::DiscriminantKind,
-        TraitSolverLangItem::DynMetadata => LangItem::DynMetadata,
-        TraitSolverLangItem::EffectsMaybe => LangItem::EffectsMaybe,
-        TraitSolverLangItem::EffectsIntersection => LangItem::EffectsIntersection,
-        TraitSolverLangItem::EffectsIntersectionOutput => LangItem::EffectsIntersectionOutput,
-        TraitSolverLangItem::EffectsNoRuntime => LangItem::EffectsNoRuntime,
-        TraitSolverLangItem::EffectsRuntime => LangItem::EffectsRuntime,
-        TraitSolverLangItem::FnPtrTrait => LangItem::FnPtrTrait,
-        TraitSolverLangItem::FusedIterator => LangItem::FusedIterator,
-        TraitSolverLangItem::Future => LangItem::Future,
-        TraitSolverLangItem::FutureOutput => LangItem::FutureOutput,
-        TraitSolverLangItem::Iterator => LangItem::Iterator,
-        TraitSolverLangItem::Metadata => LangItem::Metadata,
-        TraitSolverLangItem::Option => LangItem::Option,
-        TraitSolverLangItem::PointeeTrait => LangItem::PointeeTrait,
-        TraitSolverLangItem::PointerLike => LangItem::PointerLike,
-        TraitSolverLangItem::Poll => LangItem::Poll,
-        TraitSolverLangItem::Sized => LangItem::Sized,
-        TraitSolverLangItem::TransmuteTrait => LangItem::TransmuteTrait,
-        TraitSolverLangItem::Tuple => LangItem::Tuple,
-        TraitSolverLangItem::Unpin => LangItem::Unpin,
-        TraitSolverLangItem::Unsize => LangItem::Unsize,
-    }
-}
+macro_rules! bidirectional_lang_item_map {
+    ($($name:ident),+ $(,)?) => {
+        fn trait_lang_item_to_lang_item(lang_item: TraitSolverLangItem) -> LangItem {
+            match lang_item {
+                $(TraitSolverLangItem::$name => LangItem::$name,)+
+            }
+        }
+
+        fn lang_item_to_trait_lang_item(lang_item: LangItem) -> Option<TraitSolverLangItem> {
+            Some(match lang_item {
+                $(LangItem::$name => TraitSolverLangItem::$name,)+
+                _ => return None,
+            })
+        }
+    }
+}
+
+bidirectional_lang_item_map! {
+    // tidy-alphabetical-start
+    AsyncDestruct,
+    AsyncFn,
+    AsyncFnKindHelper,
+    AsyncFnKindUpvars,
+    AsyncFnMut,
+    AsyncFnOnce,
+    AsyncFnOnceOutput,
+    AsyncIterator,
+    CallOnceFuture,
+    CallRefFuture,
+    Clone,
+    Copy,
+    Coroutine,
+    CoroutineReturn,
+    CoroutineYield,
+    Destruct,
+    DiscriminantKind,
+    DynMetadata,
+    EffectsIntersection,
+    EffectsIntersectionOutput,
+    EffectsMaybe,
+    EffectsNoRuntime,
+    EffectsRuntime,
+    Fn,
+    FnMut,
+    FnOnce,
+    FnPtrTrait,
+    FusedIterator,
+    Future,
+    FutureOutput,
+    Iterator,
+    Metadata,
+    Option,
+    PointeeTrait,
+    PointerLike,
+    Poll,
+    Sized,
+    TransmuteTrait,
+    Tuple,
+    Unpin,
+    Unsize,
+    // tidy-alphabetical-end
+}
 
 impl<'tcx> rustc_type_ir::inherent::DefId<TyCtxt<'tcx>> for DefId {
     fn as_local(self) -> Option<LocalDefId> {
         self.as_local()
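The macro introduced here generates both directions of the mapping from a single list of names, so the two enums cannot silently drift apart. A self-contained reduction of the trick, with toy enums standing in for `LangItem` and `TraitSolverLangItem`:

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum Full { A, B, C, D } // stands in for LangItem (the larger enum)

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum Subset { A, B, C } // stands in for TraitSolverLangItem

    macro_rules! bidirectional_map {
        ($($name:ident),+ $(,)?) => {
            // Subset -> Full is total, so the match is exhaustive.
            fn subset_to_full(x: Subset) -> Full {
                match x {
                    $(Subset::$name => Full::$name,)+
                }
            }

            // Full -> Subset is partial: unlisted variants fall to `None`.
            fn full_to_subset(x: Full) -> Option<Subset> {
                Some(match x {
                    $(Full::$name => Subset::$name,)+
                    _ => return None,
                })
            }
        };
    }

    bidirectional_map! { A, B, C }

    fn main() {
        assert_eq!(subset_to_full(Subset::B), Full::B);
        assert_eq!(full_to_subset(Full::C), Some(Subset::C));
        assert_eq!(full_to_subset(Full::D), None);
    }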
@@ -387,48 +387,83 @@ where
             G::consider_auto_trait_candidate(self, goal)
         } else if cx.trait_is_alias(trait_def_id) {
             G::consider_trait_alias_candidate(self, goal)
-        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::Sized) {
-            G::consider_builtin_sized_candidate(self, goal)
-        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::Copy)
-            || cx.is_lang_item(trait_def_id, TraitSolverLangItem::Clone)
-        {
-            G::consider_builtin_copy_clone_candidate(self, goal)
-        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::PointerLike) {
-            G::consider_builtin_pointer_like_candidate(self, goal)
-        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::FnPtrTrait) {
-            G::consider_builtin_fn_ptr_trait_candidate(self, goal)
-        } else if let Some(kind) = self.cx().fn_trait_kind_from_def_id(trait_def_id) {
-            G::consider_builtin_fn_trait_candidates(self, goal, kind)
-        } else if let Some(kind) = self.cx().async_fn_trait_kind_from_def_id(trait_def_id) {
-            G::consider_builtin_async_fn_trait_candidates(self, goal, kind)
-        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::AsyncFnKindHelper) {
-            G::consider_builtin_async_fn_kind_helper_candidate(self, goal)
-        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::Tuple) {
-            G::consider_builtin_tuple_candidate(self, goal)
-        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::PointeeTrait) {
-            G::consider_builtin_pointee_candidate(self, goal)
-        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::Future) {
-            G::consider_builtin_future_candidate(self, goal)
-        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::Iterator) {
-            G::consider_builtin_iterator_candidate(self, goal)
-        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::FusedIterator) {
-            G::consider_builtin_fused_iterator_candidate(self, goal)
-        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::AsyncIterator) {
-            G::consider_builtin_async_iterator_candidate(self, goal)
-        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::Coroutine) {
-            G::consider_builtin_coroutine_candidate(self, goal)
-        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::DiscriminantKind) {
-            G::consider_builtin_discriminant_kind_candidate(self, goal)
-        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::AsyncDestruct) {
-            G::consider_builtin_async_destruct_candidate(self, goal)
-        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::Destruct) {
-            G::consider_builtin_destruct_candidate(self, goal)
-        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::TransmuteTrait) {
-            G::consider_builtin_transmute_candidate(self, goal)
-        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::EffectsIntersection) {
-            G::consider_builtin_effects_intersection_candidate(self, goal)
         } else {
-            Err(NoSolution)
+            match cx.as_lang_item(trait_def_id) {
+                Some(TraitSolverLangItem::Sized) => G::consider_builtin_sized_candidate(self, goal),
+                Some(TraitSolverLangItem::Copy | TraitSolverLangItem::Clone) => {
+                    G::consider_builtin_copy_clone_candidate(self, goal)
+                }
+                Some(TraitSolverLangItem::Fn) => {
+                    G::consider_builtin_fn_trait_candidates(self, goal, ty::ClosureKind::Fn)
+                }
+                Some(TraitSolverLangItem::FnMut) => {
+                    G::consider_builtin_fn_trait_candidates(self, goal, ty::ClosureKind::FnMut)
+                }
+                Some(TraitSolverLangItem::FnOnce) => {
+                    G::consider_builtin_fn_trait_candidates(self, goal, ty::ClosureKind::FnOnce)
+                }
+                Some(TraitSolverLangItem::AsyncFn) => {
+                    G::consider_builtin_async_fn_trait_candidates(self, goal, ty::ClosureKind::Fn)
+                }
+                Some(TraitSolverLangItem::AsyncFnMut) => {
+                    G::consider_builtin_async_fn_trait_candidates(
+                        self,
+                        goal,
+                        ty::ClosureKind::FnMut,
+                    )
+                }
+                Some(TraitSolverLangItem::AsyncFnOnce) => {
+                    G::consider_builtin_async_fn_trait_candidates(
+                        self,
+                        goal,
+                        ty::ClosureKind::FnOnce,
+                    )
+                }
+                Some(TraitSolverLangItem::PointerLike) => {
+                    G::consider_builtin_pointer_like_candidate(self, goal)
+                }
+                Some(TraitSolverLangItem::FnPtrTrait) => {
+                    G::consider_builtin_fn_ptr_trait_candidate(self, goal)
+                }
+                Some(TraitSolverLangItem::AsyncFnKindHelper) => {
+                    G::consider_builtin_async_fn_kind_helper_candidate(self, goal)
+                }
+                Some(TraitSolverLangItem::Tuple) => G::consider_builtin_tuple_candidate(self, goal),
+                Some(TraitSolverLangItem::PointeeTrait) => {
+                    G::consider_builtin_pointee_candidate(self, goal)
+                }
+                Some(TraitSolverLangItem::Future) => {
+                    G::consider_builtin_future_candidate(self, goal)
+                }
+                Some(TraitSolverLangItem::Iterator) => {
+                    G::consider_builtin_iterator_candidate(self, goal)
+                }
+                Some(TraitSolverLangItem::FusedIterator) => {
+                    G::consider_builtin_fused_iterator_candidate(self, goal)
+                }
+                Some(TraitSolverLangItem::AsyncIterator) => {
+                    G::consider_builtin_async_iterator_candidate(self, goal)
+                }
+                Some(TraitSolverLangItem::Coroutine) => {
+                    G::consider_builtin_coroutine_candidate(self, goal)
+                }
+                Some(TraitSolverLangItem::DiscriminantKind) => {
+                    G::consider_builtin_discriminant_kind_candidate(self, goal)
+                }
+                Some(TraitSolverLangItem::AsyncDestruct) => {
+                    G::consider_builtin_async_destruct_candidate(self, goal)
+                }
+                Some(TraitSolverLangItem::Destruct) => {
+                    G::consider_builtin_destruct_candidate(self, goal)
+                }
+                Some(TraitSolverLangItem::TransmuteTrait) => {
+                    G::consider_builtin_transmute_candidate(self, goal)
+                }
+                Some(TraitSolverLangItem::EffectsIntersection) => {
+                    G::consider_builtin_effects_intersection_candidate(self, goal)
+                }
+                _ => Err(NoSolution),
+            }
         };
 
         candidates.extend(result);
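A design note on this hunk (reviewer commentary, not part of the diff): the removed `else if` ladder performed one `is_lang_item` lookup per builtin candidate, and the `fn_trait_kind_from_def_id` / `async_fn_trait_kind_from_def_id` helpers hid several more; matching once on `cx.as_lang_item(trait_def_id)` does a single reverse lookup (via the new `reverse_items` map) and then dispatches on the resulting `TraitSolverLangItem`. Spelling out the `Fn`/`FnMut`/`FnOnce` and `AsyncFn*` arms with explicit `ty::ClosureKind` values is also what allowed dropping those two helper methods from the `Interner` impl earlier in this diff.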
@@ -9,6 +9,7 @@ use rustc_session::parse::ParseSess;
 use rustc_span::{sym, Span, DUMMY_SP};
 
 use std::ops::Range;
+use std::{iter, mem};
 
 /// A wrapper type to ensure that the parser handles outer attributes correctly.
 /// When we parse outer attributes, we need to ensure that we capture tokens
@@ -29,15 +30,15 @@ pub struct AttrWrapper {
     // The start of the outer attributes in the token cursor.
     // This allows us to create a `ReplaceRange` for the entire attribute
     // target, including outer attributes.
-    start_pos: usize,
+    start_pos: u32,
 }
 
 impl AttrWrapper {
-    pub(super) fn new(attrs: AttrVec, start_pos: usize) -> AttrWrapper {
+    pub(super) fn new(attrs: AttrVec, start_pos: u32) -> AttrWrapper {
         AttrWrapper { attrs, start_pos }
     }
     pub fn empty() -> AttrWrapper {
-        AttrWrapper { attrs: AttrVec::new(), start_pos: usize::MAX }
+        AttrWrapper { attrs: AttrVec::new(), start_pos: u32::MAX }
     }
 
     pub(crate) fn take_for_recovery(self, psess: &ParseSess) -> AttrVec {
@@ -53,7 +54,7 @@ impl AttrWrapper {
     // FIXME: require passing an NT to prevent misuse of this method
     pub(crate) fn prepend_to_nt_inner(self, attrs: &mut AttrVec) {
         let mut self_attrs = self.attrs;
-        std::mem::swap(attrs, &mut self_attrs);
+        mem::swap(attrs, &mut self_attrs);
         attrs.extend(self_attrs);
     }
 
@@ -91,7 +92,7 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
 struct LazyAttrTokenStreamImpl {
     start_token: (Token, Spacing),
     cursor_snapshot: TokenCursor,
-    num_calls: usize,
+    num_calls: u32,
     break_last_token: bool,
     replace_ranges: Box<[ReplaceRange]>,
 }
@@ -104,15 +105,16 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
         // produce an empty `TokenStream` if no calls were made, and omit the
         // final token otherwise.
         let mut cursor_snapshot = self.cursor_snapshot.clone();
-        let tokens =
-            std::iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
-                .chain(std::iter::repeat_with(|| {
-                    let token = cursor_snapshot.next();
-                    (FlatToken::Token(token.0), token.1)
-                }))
-                .take(self.num_calls);
+        let tokens = iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
+            .chain(iter::repeat_with(|| {
+                let token = cursor_snapshot.next();
+                (FlatToken::Token(token.0), token.1)
+            }))
+            .take(self.num_calls as usize);
 
-        if !self.replace_ranges.is_empty() {
+        if self.replace_ranges.is_empty() {
+            make_attr_token_stream(tokens, self.break_last_token)
+        } else {
             let mut tokens: Vec<_> = tokens.collect();
             let mut replace_ranges = self.replace_ranges.to_vec();
             replace_ranges.sort_by_key(|(range, _)| range.start);
@@ -156,7 +158,7 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
                 // This keeps the total length of `tokens` constant throughout the
                 // replacement process, allowing us to use all of the `ReplaceRanges` entries
                 // without adjusting indices.
-                let filler = std::iter::repeat((FlatToken::Empty, Spacing::Alone))
+                let filler = iter::repeat((FlatToken::Empty, Spacing::Alone))
                     .take(range.len() - new_tokens.len());
 
                 tokens.splice(
@@ -164,9 +166,7 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
                     new_tokens.into_iter().chain(filler),
                 );
             }
-            make_token_stream(tokens.into_iter(), self.break_last_token)
-        } else {
-            make_token_stream(tokens, self.break_last_token)
+            make_attr_token_stream(tokens.into_iter(), self.break_last_token)
         }
     }
 }
@@ -218,24 +218,23 @@ impl<'a> Parser<'a> {
         let start_token = (self.token.clone(), self.token_spacing);
         let cursor_snapshot = self.token_cursor.clone();
         let start_pos = self.num_bump_calls;
 
         let has_outer_attrs = !attrs.attrs.is_empty();
-        let prev_capturing = std::mem::replace(&mut self.capture_state.capturing, Capturing::Yes);
         let replace_ranges_start = self.capture_state.replace_ranges.len();
 
-        let ret = f(self, attrs.attrs);
-
-        self.capture_state.capturing = prev_capturing;
-
-        let (mut ret, trailing) = ret?;
+        let (mut ret, trailing) = {
+            let prev_capturing = mem::replace(&mut self.capture_state.capturing, Capturing::Yes);
+            let ret_and_trailing = f(self, attrs.attrs);
+            self.capture_state.capturing = prev_capturing;
+            ret_and_trailing?
+        };
 
         // When we're not in `capture-cfg` mode, then bail out early if:
         // 1. Our target doesn't support tokens at all (e.g we're parsing an `NtIdent`)
         //    so there's nothing for us to do.
         // 2. Our target already has tokens set (e.g. we've parsed something
-        //    like `#[my_attr] $item`. The actual parsing code takes care of prepending
-        //    any attributes to the nonterminal, so we don't need to modify the
-        //    already captured tokens.
+        //    like `#[my_attr] $item`). The actual parsing code takes care of
+        //    prepending any attributes to the nonterminal, so we don't need to
+        //    modify the already captured tokens.
         // Note that this check is independent of `force_collect`- if we already
         // have tokens, or can't even store them, then there's never a need to
         // force collection of new tokens.
@@ -276,37 +275,32 @@ impl<'a> Parser<'a> {
 
         let replace_ranges_end = self.capture_state.replace_ranges.len();
 
-        let mut end_pos = self.num_bump_calls;
-
-        let mut captured_trailing = false;
-
         // Capture a trailing token if requested by the callback 'f'
-        match trailing {
-            TrailingToken::None => {}
+        let captured_trailing = match trailing {
+            TrailingToken::None => false,
             TrailingToken::Gt => {
                 assert_eq!(self.token.kind, token::Gt);
+                false
             }
             TrailingToken::Semi => {
                 assert_eq!(self.token.kind, token::Semi);
-                end_pos += 1;
-                captured_trailing = true;
+                true
             }
-            TrailingToken::MaybeComma => {
-                if self.token.kind == token::Comma {
-                    end_pos += 1;
-                    captured_trailing = true;
-                }
-            }
-        }
+            TrailingToken::MaybeComma => self.token.kind == token::Comma,
+        };
 
-        // If we 'broke' the last token (e.g. breaking a '>>' token to two '>' tokens),
-        // then extend the range of captured tokens to include it, since the parser
-        // was not actually bumped past it. When the `LazyAttrTokenStream` gets converted
-        // into an `AttrTokenStream`, we will create the proper token.
-        if self.break_last_token {
-            assert!(!captured_trailing, "Cannot set break_last_token and have trailing token");
-            end_pos += 1;
-        }
+        assert!(
+            !(self.break_last_token && captured_trailing),
+            "Cannot set break_last_token and have trailing token"
+        );
+
+        let end_pos = self.num_bump_calls
+            + captured_trailing as u32
+            // If we 'broke' the last token (e.g. breaking a '>>' token to two '>' tokens), then
+            // extend the range of captured tokens to include it, since the parser was not actually
+            // bumped past it. When the `LazyAttrTokenStream` gets converted into an
+            // `AttrTokenStream`, we will create the proper token.
+            + self.break_last_token as u32;
 
         let num_calls = end_pos - start_pos;
 
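A small readability note (commentary, not diff content): the rewrite folds the old `end_pos += 1` mutations into a single expression using `bool as u32`, which is guaranteed to be 0 or 1. A sketch of the idiom:

    fn end_pos(num_bump_calls: u32, captured_trailing: bool, break_last_token: bool) -> u32 {
        // `bool as u32` is defined to be 0 or 1, so each flag extends the
        // captured range by exactly one token when set.
        num_bump_calls + captured_trailing as u32 + break_last_token as u32
    }

    fn main() {
        assert_eq!(end_pos(10, false, false), 10);
        assert_eq!(end_pos(10, true, false), 11);
        assert_eq!(end_pos(10, true, true), 12);
    }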
@@ -318,14 +312,11 @@ impl<'a> Parser<'a> {
             // Grab any replace ranges that occur *inside* the current AST node.
             // We will perform the actual replacement when we convert the `LazyAttrTokenStream`
             // to an `AttrTokenStream`.
-            let start_calls: u32 = start_pos.try_into().unwrap();
             self.capture_state.replace_ranges[replace_ranges_start..replace_ranges_end]
                 .iter()
                 .cloned()
                 .chain(inner_attr_replace_ranges.iter().cloned())
-                .map(|(range, tokens)| {
-                    ((range.start - start_calls)..(range.end - start_calls), tokens)
-                })
+                .map(|(range, tokens)| ((range.start - start_pos)..(range.end - start_pos), tokens))
                 .collect()
         };
 
@@ -340,7 +331,7 @@ impl<'a> Parser<'a> {
         // If we support tokens at all
         if let Some(target_tokens) = ret.tokens_mut() {
             if target_tokens.is_none() {
-                // Store se our newly captured tokens into the AST node
+                // Store our newly captured tokens into the AST node.
                 *target_tokens = Some(tokens.clone());
             }
         }
@@ -382,10 +373,10 @@ impl<'a> Parser<'a> {
     }
 }
 
-/// Converts a flattened iterator of tokens (including open and close delimiter tokens)
-/// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair
-/// of open and close delims.
-fn make_token_stream(
+/// Converts a flattened iterator of tokens (including open and close delimiter tokens) into an
+/// `AttrTokenStream`, creating an `AttrTokenTree::Delimited` for each matching pair of open and
+/// close delims.
+fn make_attr_token_stream(
     mut iter: impl Iterator<Item = (FlatToken, Spacing)>,
     break_last_token: bool,
 ) -> AttrTokenStream {
@@ -464,6 +455,6 @@ mod size_asserts {
     use rustc_data_structures::static_assert_size;
     // tidy-alphabetical-start
     static_assert_size!(AttrWrapper, 16);
-    static_assert_size!(LazyAttrTokenStreamImpl, 104);
+    static_assert_size!(LazyAttrTokenStreamImpl, 96);
     // tidy-alphabetical-end
 }
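An aside on these assertions (commentary): `static_assert_size!` is how the compiler catches accidental growth of hot types at build time — the `usize` → `u32` narrowing above is what shaves `LazyAttrTokenStreamImpl` from 104 to 96 bytes here. The same guard can be written in plain Rust without rustc's helper macro; a sketch:

    // Rough equivalent of rustc_data_structures::static_assert_size!(T, N):
    // compilation fails if the size ever changes.
    #[allow(dead_code)]
    struct Example {
        a: u64,
        b: u32,
        c: u32,
    }

    const _: () = assert!(std::mem::size_of::<Example>() == 16);

    fn main() {
        println!("Example is {} bytes", std::mem::size_of::<Example>());
    }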
@ -153,7 +153,7 @@ pub struct Parser<'a> {
|
||||||
expected_tokens: Vec<TokenType>,
|
expected_tokens: Vec<TokenType>,
|
||||||
token_cursor: TokenCursor,
|
token_cursor: TokenCursor,
|
||||||
// The number of calls to `bump`, i.e. the position in the token stream.
|
// The number of calls to `bump`, i.e. the position in the token stream.
|
||||||
num_bump_calls: usize,
|
num_bump_calls: u32,
|
||||||
// During parsing we may sometimes need to 'unglue' a glued token into two
|
// During parsing we may sometimes need to 'unglue' a glued token into two
|
||||||
// component tokens (e.g. '>>' into '>' and '>), so the parser can consume
|
// component tokens (e.g. '>>' into '>' and '>), so the parser can consume
|
||||||
// them one at a time. This process bypasses the normal capturing mechanism
|
// them one at a time. This process bypasses the normal capturing mechanism
|
||||||
|
@ -192,7 +192,7 @@ pub struct Parser<'a> {
 // This type is used a lot, e.g. it's cloned when matching many declarative macro rules with nonterminals. Make sure
 // it doesn't unintentionally get bigger.
 #[cfg(target_pointer_width = "64")]
-rustc_data_structures::static_assert_size!(Parser<'_>, 264);
+rustc_data_structures::static_assert_size!(Parser<'_>, 256);
 
 /// Stores span information about a closure.
 #[derive(Clone, Debug)]
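The 8-byte drop asserted here follows from `num_bump_calls` shrinking from `usize` to `u32`. A minimal demonstration of the effect on a typical 64-bit target, using a hypothetical struct rather than the real `Parser`:

```rust
// On a typical 64-bit target, replacing a usize field with u32 lowers the
// struct's alignment requirement from 8 to 4, and its padded size shrinks.
struct WithUsize { pos: usize, flag: bool }
struct WithU32 { pos: u32, flag: bool }

fn main() {
    println!("usize field: {} bytes", std::mem::size_of::<WithUsize>()); // typically 16
    println!("u32 field:   {} bytes", std::mem::size_of::<WithU32>());   // typically 8
}
```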
@ -1572,7 +1572,7 @@ impl<'a> Parser<'a> {
         self.expected_tokens.clear();
     }
 
-    pub fn approx_token_stream_pos(&self) -> usize {
+    pub fn approx_token_stream_pos(&self) -> u32 {
         self.num_bump_calls
     }
 }
@ -1630,8 +1630,6 @@ options! {
         "only allow the listed language features to be enabled in code (comma separated)"),
     always_encode_mir: bool = (false, parse_bool, [TRACKED],
         "encode MIR of all functions into the crate metadata (default: no)"),
-    asm_comments: bool = (false, parse_bool, [TRACKED],
-        "generate comments into the assembly (may change behavior) (default: no)"),
     assert_incr_state: Option<String> = (None, parse_opt_string, [UNTRACKED],
         "assert that the incremental cache is in given state: \
         either `loaded` or `not-loaded`."),
@ -2107,6 +2105,8 @@ written to standard error output)"),
         "Generate sync unwind tables instead of async unwind tables (default: no)"),
     validate_mir: bool = (false, parse_bool, [UNTRACKED],
         "validate MIR after each transformation"),
+    verbose_asm: bool = (false, parse_bool, [TRACKED],
+        "add descriptive comments from LLVM to the assembly (may change behavior) (default: no)"),
     #[rustc_lint_opt_deny_field_access("use `Session::verbose_internals` instead of this field")]
     verbose_internals: bool = (false, parse_bool, [TRACKED_NO_CRATE_HASH],
         "in general, enable more debug printouts (default: no)"),
@ -220,6 +220,8 @@ pub trait Interner:
 
     fn is_lang_item(self, def_id: Self::DefId, lang_item: TraitSolverLangItem) -> bool;
 
+    fn as_lang_item(self, def_id: Self::DefId) -> Option<TraitSolverLangItem>;
+
     fn associated_type_def_ids(self, def_id: Self::DefId) -> impl IntoIterator<Item = Self::DefId>;
 
     fn for_each_relevant_impl(
@ -245,10 +247,6 @@ pub trait Interner:
 
     fn trait_may_be_implemented_via_object(self, trait_def_id: Self::DefId) -> bool;
 
-    fn fn_trait_kind_from_def_id(self, trait_def_id: Self::DefId) -> Option<ty::ClosureKind>;
-
-    fn async_fn_trait_kind_from_def_id(self, trait_def_id: Self::DefId) -> Option<ty::ClosureKind>;
-
     fn supertrait_def_ids(self, trait_def_id: Self::DefId)
     -> impl IntoIterator<Item = Self::DefId>;
 
@ -3,8 +3,11 @@
 pub enum TraitSolverLangItem {
     // tidy-alphabetical-start
     AsyncDestruct,
+    AsyncFn,
     AsyncFnKindHelper,
     AsyncFnKindUpvars,
+    AsyncFnMut,
+    AsyncFnOnce,
     AsyncFnOnceOutput,
     AsyncIterator,
     CallOnceFuture,
@ -22,6 +25,9 @@ pub enum TraitSolverLangItem {
     EffectsMaybe,
     EffectsNoRuntime,
     EffectsRuntime,
+    Fn,
+    FnMut,
+    FnOnce,
     FnPtrTrait,
     FusedIterator,
     Future,
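Together with `as_lang_item` above, these new variants let callers replace the removed `fn_trait_kind_from_def_id`-style accessors with a plain match on the resolved lang item. A minimal sketch of that pattern, with stub enums standing in for rustc's types:

```rust
// Stub types for illustration; rustc's ClosureKind and TraitSolverLangItem
// are richer, but the mapping idea is the same.
#[derive(Debug, PartialEq)]
enum ClosureKind { Fn, FnMut, FnOnce }

#[derive(Debug, PartialEq)]
enum TraitSolverLangItem { Fn, FnMut, FnOnce, Future }

// What a caller can do instead of the removed accessor: resolve the def-id
// to a lang item first (via `as_lang_item`), then match on the result.
fn fn_trait_kind(item: &TraitSolverLangItem) -> Option<ClosureKind> {
    match item {
        TraitSolverLangItem::Fn => Some(ClosureKind::Fn),
        TraitSolverLangItem::FnMut => Some(ClosureKind::FnMut),
        TraitSolverLangItem::FnOnce => Some(ClosureKind::FnOnce),
        _ => None,
    }
}

fn main() {
    assert_eq!(fn_trait_kind(&TraitSolverLangItem::FnMut), Some(ClosureKind::FnMut));
    assert_eq!(fn_trait_kind(&TraitSolverLangItem::Future), None);
}
```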
@ -317,9 +317,7 @@ fn format_rusage_data(child: Child) -> Option<String> {
 
     use windows::{
         Win32::Foundation::HANDLE,
-        Win32::System::ProcessStatus::{
-            K32GetProcessMemoryInfo, PROCESS_MEMORY_COUNTERS, PROCESS_MEMORY_COUNTERS_EX,
-        },
+        Win32::System::ProcessStatus::{K32GetProcessMemoryInfo, PROCESS_MEMORY_COUNTERS},
         Win32::System::Threading::GetProcessTimes,
         Win32::System::Time::FileTimeToSystemTime,
     };
@ -331,6 +329,7 @@ fn format_rusage_data(child: Child) -> Option<String> {
     let mut kernel_filetime = Default::default();
     let mut kernel_time = Default::default();
     let mut memory_counters = PROCESS_MEMORY_COUNTERS::default();
+    let memory_counters_size = std::mem::size_of_val(&memory_counters);
 
     unsafe {
         GetProcessTimes(
@ -347,15 +346,9 @@ fn format_rusage_data(child: Child) -> Option<String> {
 
     // Unlike on Linux with RUSAGE_CHILDREN, this will only return memory information for the process
     // with the given handle and none of that process's children.
-    unsafe {
-        K32GetProcessMemoryInfo(
-            handle,
-            &mut memory_counters,
-            std::mem::size_of::<PROCESS_MEMORY_COUNTERS_EX>() as u32,
-        )
-    }
-    .ok()
-    .ok()?;
+    unsafe { K32GetProcessMemoryInfo(handle, &mut memory_counters, memory_counters_size as u32) }
+        .ok()
+        .ok()?;
 
     // Guide on interpreting these numbers:
     // https://docs.microsoft.com/en-us/windows/win32/psapi/process-memory-usage-information
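The bug fixed here was passing the size of `PROCESS_MEMORY_COUNTERS_EX` while supplying a `PROCESS_MEMORY_COUNTERS`; `size_of_val` ties the byte count to the value actually passed. A standalone illustration of the pattern, with hypothetical structs rather than the winapi ones:

```rust
// `Small` and `Big` are hypothetical stand-ins for the two winapi structs.
#[repr(C)]
struct Small { a: u64 }
#[repr(C)]
struct Big { a: u64, b: u64 }

fn main() {
    let counters = Small { a: 0 };
    // Wrong: hard-codes the size of a different (larger) type.
    let wrong = std::mem::size_of::<Big>();
    // Right: derives the size from the value that is actually passed.
    let right = std::mem::size_of_val(&counters);
    assert_ne!(wrong, right);
    assert_eq!(right, std::mem::size_of::<Small>());
}
```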
@ -9,9 +9,9 @@ use std::path::{Path, PathBuf};
 
 use crate::core::builder::Builder;
 use crate::core::{build_steps::dist::distdir, builder::Kind};
-use crate::utils::channel;
 use crate::utils::exec::BootstrapCommand;
 use crate::utils::helpers::{move_file, t};
+use crate::utils::{channel, helpers};
 
 #[derive(Copy, Clone)]
 pub(crate) enum OverlayKind {
@ -351,6 +351,30 @@ impl<'a> Tarball<'a> {
     };
 
     cmd.args(["--compression-profile", compression_profile]);
 
+    // We want to use a pinned modification time for files in the archive
+    // to achieve better reproducibility. However, using the same mtime for all
+    // releases is not ideal, because it can break e.g. Cargo mtime checking
+    // (https://github.com/rust-lang/rust/issues/125578).
+    // Therefore, we set mtime to the date of the latest commit (if we're managed
+    // by git). In this way, the archive will still be always the same for a given commit
+    // (achieving reproducibility), but it will also change between different commits and
+    // Rust versions, so that it won't break mtime-based caches.
+    //
+    // Note that this only overrides the mtime of files, not directories, due to the
+    // limitations of the tarballer tool. Directories will have their mtime set to 2006.
+
+    // Get the UTC timestamp of the last git commit, if we're under git.
+    // We need to use UTC, so that anyone who tries to rebuild from the same commit
+    // gets the same timestamp.
+    if self.builder.rust_info().is_managed_git_subrepository() {
+        // %ct means committer date
+        let timestamp = helpers::output(
+            helpers::git(Some(&self.builder.src)).arg("log").arg("-1").arg("--format=%ct"),
+        );
+        cmd.args(["--override-file-mtime", timestamp.trim()]);
+    }
+
     self.builder.run(cmd);
 
     // Ensure there are no symbolic links in the tarball. In particular,
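For context, the git query above can be reproduced standalone; `%ct` prints the committer date as a Unix timestamp, which is timezone-independent and therefore stable across machines rebuilding the same commit. A minimal sketch, assuming `git` on `PATH` and a checkout:

```rust
use std::process::Command;

fn main() {
    // Same query as the bootstrap code above: committer date (%ct) of the
    // latest commit, as seconds since the Unix epoch.
    let out = Command::new("git")
        .args(["log", "-1", "--format=%ct"])
        .output()
        .expect("failed to run git");
    let timestamp = String::from_utf8_lossy(&out.stdout).trim().to_string();
    println!("would pass: --override-file-mtime {timestamp}");
}
```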
70 src/doc/unstable-book/src/compiler-flags/verbose-asm.md Normal file
@ -0,0 +1,70 @@
+# `verbose-asm`
+
+The tracking issue for this feature is: [#126802](https://github.com/rust-lang/rust/issues/126802).
+
+------------------------
+
+This enables passing `-Zverbose-asm` to get contextual comments added by LLVM.
+
+Sample code:
+
+```rust
+#[no_mangle]
+pub fn foo(a: i32, b: i32) -> i32 {
+    a + b
+}
+```
+
+Default output:
+
+```asm
+foo:
+        push rax
+        add edi, esi
+        mov dword ptr [rsp + 4], edi
+        seto al
+        jo .LBB0_2
+        mov eax, dword ptr [rsp + 4]
+        pop rcx
+        ret
+.LBB0_2:
+        lea rdi, [rip + .L__unnamed_1]
+        mov rax, qword ptr [rip + core::panicking::panic_const::panic_const_add_overflow::h9c85248fe0d735b2@GOTPCREL]
+        call rax
+
+.L__unnamed_2:
+        .ascii "/app/example.rs"
+
+.L__unnamed_1:
+        .quad .L__unnamed_2
+        .asciz "\017\000\000\000\000\000\000\000\004\000\000\000\005\000\000"
+```
+
+With `-Zverbose-asm`:
+
+```asm
+foo:                                    # @foo
+# %bb.0:
+        push rax
+        add edi, esi
+        mov dword ptr [rsp + 4], edi    # 4-byte Spill
+        seto al
+        jo .LBB0_2
+# %bb.1:
+        mov eax, dword ptr [rsp + 4]    # 4-byte Reload
+        pop rcx
+        ret
+.LBB0_2:
+        lea rdi, [rip + .L__unnamed_1]
+        mov rax, qword ptr [rip + core::panicking::panic_const::panic_const_add_overflow::h9c85248fe0d735b2@GOTPCREL]
+        call rax
+                                        # -- End function
+.L__unnamed_2:
+        .ascii "/app/example.rs"
+
+.L__unnamed_1:
+        .quad .L__unnamed_2
+        .asciz "\017\000\000\000\000\000\000\000\004\000\000\000\005\000\000"
+
+# DW_AT_external
+```
@ -321,8 +321,9 @@ pub fn set_host_rpath(cmd: &mut Command) {
 /// Read the contents of a file that cannot simply be read by
 /// read_to_string, due to invalid utf8 data, then assert that it contains `expected`.
 #[track_caller]
-pub fn invalid_utf8_contains<P: AsRef<Path>>(path: P, expected: &str) {
+pub fn invalid_utf8_contains<P: AsRef<Path>, S: AsRef<str>>(path: P, expected: S) {
     let buffer = fs_wrapper::read(path.as_ref());
+    let expected = expected.as_ref();
     if !String::from_utf8_lossy(&buffer).contains(expected) {
         eprintln!("=== FILE CONTENTS (LOSSY) ===");
         eprintln!("{}", String::from_utf8_lossy(&buffer));
@ -335,8 +336,9 @@ pub fn invalid_utf8_contains<P: AsRef<Path>>(path: P, expected: &str) {
 /// Read the contents of a file that cannot simply be read by
 /// read_to_string, due to invalid utf8 data, then assert that it does not contain `expected`.
 #[track_caller]
-pub fn invalid_utf8_not_contains<P: AsRef<Path>>(path: P, expected: &str) {
+pub fn invalid_utf8_not_contains<P: AsRef<Path>, S: AsRef<str>>(path: P, expected: S) {
     let buffer = fs_wrapper::read(path.as_ref());
+    let expected = expected.as_ref();
     if String::from_utf8_lossy(&buffer).contains(expected) {
         eprintln!("=== FILE CONTENTS (LOSSY) ===");
         eprintln!("{}", String::from_utf8_lossy(&buffer));
@ -86,7 +86,8 @@ impl Rustc {
     }
 
     /// Specify type(s) of output files to generate.
-    pub fn emit(&mut self, kinds: &str) -> &mut Self {
+    pub fn emit<S: AsRef<str>>(&mut self, kinds: S) -> &mut Self {
+        let kinds = kinds.as_ref();
         self.cmd.arg(format!("--emit={kinds}"));
         self
     }
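The point of the `S: AsRef<str>` signatures in this and the previous hunks is that callers can now pass an owned `String` (e.g. from `format!`) as well as a `&str`. A minimal illustration with a hypothetical builder, not the actual run-make-support type:

```rust
// Hypothetical stand-in for run-make-support's `Rustc` builder.
struct Builder { args: Vec<String> }

impl Builder {
    fn emit<S: AsRef<str>>(&mut self, kinds: S) -> &mut Self {
        let kinds = kinds.as_ref();
        self.args.push(format!("--emit={kinds}"));
        self
    }
}

fn main() {
    let mut b = Builder { args: Vec::new() };
    b.emit("asm");                      // &str still works
    b.emit(format!("llvm-ir={}", "x")); // an owned String now works too
    assert_eq!(b.args, ["--emit=asm", "--emit=llvm-ir=x"]);
}
```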
@ -55,6 +55,12 @@ actor! {
         /// The formats used to compress the tarball
         #[arg(value_name = "FORMAT", default_value_t)]
         compression_formats: CompressionFormats,
+
+        /// Modification time that will be set for all files added to the archive.
+        /// The default is the date of the first Rust commit from 2006.
+        /// This serves for better reproducibility of the archives.
+        #[arg(value_name = "FILE_MTIME", default_value_t = 1153704088)]
+        override_file_mtime: u64,
     }
 }
@ -145,7 +151,8 @@ impl Combiner {
             .input(self.package_name)
             .output(path_to_str(&output)?.into())
             .compression_profile(self.compression_profile)
-            .compression_formats(self.compression_formats);
+            .compression_formats(self.compression_formats)
+            .override_file_mtime(self.override_file_mtime);
         tarballer.run()?;
 
         Ok(())
@ -61,6 +61,12 @@ actor! {
         /// The formats used to compress the tarball
         #[arg(value_name = "FORMAT", default_value_t)]
         compression_formats: CompressionFormats,
+
+        /// Modification time that will be set for all files added to the archive.
+        /// The default is the date of the first Rust commit from 2006.
+        /// This serves for better reproducibility of the archives.
+        #[arg(value_name = "FILE_MTIME", default_value_t = 1153704088)]
+        override_file_mtime: u64,
     }
 }
@ -114,7 +120,8 @@ impl Generator {
             .input(self.package_name)
             .output(path_to_str(&output)?.into())
             .compression_profile(self.compression_profile)
-            .compression_formats(self.compression_formats);
+            .compression_formats(self.compression_formats)
+            .override_file_mtime(self.override_file_mtime);
         tarballer.run()?;
 
         Ok(())
@ -32,6 +32,12 @@ actor! {
         /// The formats used to compress the tarball.
         #[arg(value_name = "FORMAT", default_value_t)]
         compression_formats: CompressionFormats,
+
+        /// Modification time that will be set for all files added to the archive.
+        /// The default is the date of the first Rust commit from 2006.
+        /// This serves for better reproducibility of the archives.
+        #[arg(value_name = "FILE_MTIME", default_value_t = 1153704088)]
+        override_file_mtime: u64,
     }
 }
@ -65,6 +71,8 @@ impl Tarballer {
         let buf = BufWriter::with_capacity(1024 * 1024, encoder);
         let mut builder = Builder::new(buf);
         // Make uid, gid and mtime deterministic to improve reproducibility
+        // The modification time of directories will be set to the date of the first Rust commit.
+        // The modification time of files will be set to `override_file_mtime` (see `append_path`).
         builder.mode(HeaderMode::Deterministic);
 
         let pool = rayon::ThreadPoolBuilder::new().num_threads(2).build().unwrap();
@ -77,7 +85,7 @@ impl Tarballer {
         }
         for path in files {
             let src = Path::new(&self.work_dir).join(&path);
-            append_path(&mut builder, &src, &path)
+            append_path(&mut builder, &src, &path, self.override_file_mtime)
                 .with_context(|| format!("failed to tar file '{}'", src.display()))?;
         }
         builder
@ -93,10 +101,16 @@ impl Tarballer {
     }
 }
 
-fn append_path<W: Write>(builder: &mut Builder<W>, src: &Path, path: &String) -> Result<()> {
+fn append_path<W: Write>(
+    builder: &mut Builder<W>,
+    src: &Path,
+    path: &String,
+    override_file_mtime: u64,
+) -> Result<()> {
     let stat = symlink_metadata(src)?;
     let mut header = Header::new_gnu();
     header.set_metadata_in_mode(&stat, HeaderMode::Deterministic);
+    header.set_mtime(override_file_mtime);
 
     if stat.file_type().is_symlink() {
         let link = read_link(src)?;
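Note the ordering in the hunk above: deterministic mode pins its own fixed mtime, so `set_mtime` must run after `set_metadata_in_mode` to take effect. A small sketch using the `tar` crate (assumed as a dependency, as in the installer):

```rust
use tar::{Header, HeaderMode};

fn main() {
    let meta = std::fs::symlink_metadata("Cargo.toml").expect("stat failed");
    let mut header = Header::new_gnu();
    // Deterministic mode normalizes uid/gid/mtime to fixed values...
    header.set_metadata_in_mode(&meta, HeaderMode::Deterministic);
    // ...so the per-file override must be applied afterwards.
    header.set_mtime(1720000000); // e.g. a git committer timestamp
    assert_eq!(header.mtime().unwrap(), 1720000000);
}
```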
12 tests/assembly/asm-comments.rs Normal file
@ -0,0 +1,12 @@
+//@ assembly-output: emit-asm
+//@ only-x86_64
+// Check that comments in assembly get passed
+
+#![crate_type = "lib"]
+
+// CHECK-LABEL: test_comments:
+#[no_mangle]
+pub fn test_comments() {
+    // CHECK: example comment
+    unsafe { core::arch::asm!("nop // example comment") };
+}
@ -4,7 +4,7 @@ use run_make_support::{fs_wrapper, rustc};
 
 fn emit_and_check(out_dir: &Path, out_file: &str, format: &str) {
     let out_file = out_dir.join(out_file);
-    rustc().input("foo.rs").emit(&format!("{format}={}", out_file.display())).run();
+    rustc().input("foo.rs").emit(format!("{format}={}", out_file.display())).run();
     assert!(out_file.is_file());
 }
@ -0,0 +1,8 @@
+error: expected one of `!` or `::`, found keyword `impl`
+  --> $DIR/safe-impl-trait.rs:5:6
+   |
+LL | safe impl Bar for () { }
+   |      ^^^^ expected one of `!` or `::`
+
+error: aborting due to 1 previous error
+
@ -0,0 +1,8 @@
+//@ revisions: gated ungated
+#![cfg_attr(gated, feature(unsafe_extern_blocks))]
+
+trait Bar {}
+safe impl Bar for () { }
+//~^ ERROR expected one of `!` or `::`, found keyword `impl`
+
+fn main() {}
@ -0,0 +1,8 @@
+error: expected one of `!` or `::`, found keyword `impl`
+  --> $DIR/safe-impl-trait.rs:5:6
+   |
+LL | safe impl Bar for () { }
+   |      ^^^^ expected one of `!` or `::`
+
+error: aborting due to 1 previous error
+
@ -0,0 +1,8 @@
+error: expected one of `!` or `::`, found keyword `trait`
+  --> $DIR/safe-trait.rs:4:6
+   |
+LL | safe trait Foo {}
+   |      ^^^^^ expected one of `!` or `::`
+
+error: aborting due to 1 previous error
+
7 tests/ui/rust-2024/unsafe-extern-blocks/safe-trait.rs Normal file
@ -0,0 +1,7 @@
+//@ revisions: gated ungated
+#![cfg_attr(gated, feature(unsafe_extern_blocks))]
+
+safe trait Foo {}
+//~^ ERROR expected one of `!` or `::`, found keyword `trait`
+
+fn main() {}
@ -0,0 +1,8 @@
+error: expected one of `!` or `::`, found keyword `trait`
+  --> $DIR/safe-trait.rs:4:6
+   |
+LL | safe trait Foo {}
+   |      ^^^^^ expected one of `!` or `::`
+
+error: aborting due to 1 previous error
+
29 tests/ui/self/arbitrary-self-from-method-substs-ice.rs Normal file
@ -0,0 +1,29 @@
+//! The same as the non-ICE test, but const eval will run typeck of
+//! `get` before running wfcheck (as that may in itself trigger const
+//! eval again, and thus cause bogus cycles). This used to ICE because
+//! we asserted that an error had already been emitted.
+
+use std::ops::Deref;
+
+struct Foo(u32);
+impl Foo {
+    const fn get<R: Deref<Target = Self>>(self: R) -> u32 {
+        //~^ ERROR: `R` cannot be used as the type of `self`
+        //~| ERROR destructor of `R` cannot be evaluated at compile-time
+        self.0
+        //~^ ERROR cannot borrow here, since the borrowed element may contain interior mutability
+        //~| ERROR cannot call non-const fn `<R as Deref>::deref` in constant function
+    }
+}
+
+const FOO: () = {
+    let foo = Foo(1);
+    foo.get::<&Foo>();
+};
+
+const BAR: [(); {
+    FOO;
+    0
+}] = [];
+
+fn main() {}
46 tests/ui/self/arbitrary-self-from-method-substs-ice.stderr Normal file
@ -0,0 +1,46 @@
+error[E0658]: cannot borrow here, since the borrowed element may contain interior mutability
+  --> $DIR/arbitrary-self-from-method-substs-ice.rs:13:9
+   |
+LL |         self.0
+   |         ^^^^
+   |
+   = note: see issue #80384 <https://github.com/rust-lang/rust/issues/80384> for more information
+   = help: add `#![feature(const_refs_to_cell)]` to the crate attributes to enable
+   = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
+
+error[E0015]: cannot call non-const fn `<R as Deref>::deref` in constant functions
+  --> $DIR/arbitrary-self-from-method-substs-ice.rs:13:9
+   |
+LL |         self.0
+   |         ^^^^^^
+   |
+   = note: calls in constant functions are limited to constant functions, tuple structs and tuple variants
+help: add `#![feature(const_trait_impl)]` to the crate attributes to enable
+   |
+LL + #![feature(const_trait_impl)]
+   |
+
+error[E0493]: destructor of `R` cannot be evaluated at compile-time
+  --> $DIR/arbitrary-self-from-method-substs-ice.rs:10:43
+   |
+LL |     const fn get<R: Deref<Target = Self>>(self: R) -> u32 {
+   |                                           ^^^^ the destructor for this type cannot be evaluated in constant functions
+...
+LL |     }
+   |     - value is dropped here
+
+error[E0658]: `R` cannot be used as the type of `self` without the `arbitrary_self_types` feature
+  --> $DIR/arbitrary-self-from-method-substs-ice.rs:10:49
+   |
+LL |     const fn get<R: Deref<Target = Self>>(self: R) -> u32 {
+   |                                                 ^
+   |
+   = note: see issue #44874 <https://github.com/rust-lang/rust/issues/44874> for more information
+   = help: add `#![feature(arbitrary_self_types)]` to the crate attributes to enable
+   = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
+   = help: consider changing to `self`, `&self`, `&mut self`, `self: Box<Self>`, `self: Rc<Self>`, `self: Arc<Self>`, or `self: Pin<P>` (where P is one of the previous types except `Self`)
+
+error: aborting due to 4 previous errors
+
+Some errors have detailed explanations: E0015, E0493, E0658.
+For more information about an error, try `rustc --explain E0015`.
@ -9,7 +9,6 @@ LL | fn get<R: Deref<Target = Self>>(self: R) -> u32 {
    = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
    = help: consider changing to `self`, `&self`, `&mut self`, `self: Box<Self>`, `self: Rc<Self>`, `self: Arc<Self>`, or `self: Pin<P>` (where P is one of the previous types except `Self`)
 
-ERROR rustc_hir_typeck::method::confirm Foo was a subtype of &Foo but now is not?
 error: aborting due to 1 previous error
 
 For more information about this error, try `rustc --explain E0658`.