Auto merge of #110249 - matthiaskrgr:rollup-7iig04q, r=matthiaskrgr
Rollup of 8 pull requests

Successful merges:

 - #110153 (Fix typos in compiler)
 - #110165 (rustdoc: use CSS `overscroll-behavior` instead of JavaScript)
 - #110175 (Symbol cleanups)
 - #110203 (Remove `..` from return type notation)
 - #110205 (rustdoc: make settings radio and checks thicker, less contrast)
 - #110222 (Improve the error message when forwarding a matched fragment to another macro)
 - #110237 (Split out a separate feature gate for impl trait in associated types)
 - #110241 (tidy: Issue an error when UI test limits are too high)

Failed merges:

 - #110218 (Remove `ToRegionVid`)

r? `@ghost`
`@rustbot` modify labels: rollup
commit 4087deaccd
177 changed files with 634 additions and 554 deletions
@@ -167,9 +167,6 @@ pub enum GenericArgs {
     AngleBracketed(AngleBracketedArgs),
     /// The `(A, B)` and `C` in `Foo(A, B) -> C`.
     Parenthesized(ParenthesizedArgs),
-    /// Associated return type bounds, like `T: Trait<method(..): Send>`
-    /// which applies the `Send` bound to the return-type of `method`.
-    ReturnTypeNotation(Span),
 }
 
 impl GenericArgs {
@@ -181,7 +178,6 @@ impl GenericArgs {
         match self {
             AngleBracketed(data) => data.span,
             Parenthesized(data) => data.span,
-            ReturnTypeNotation(span) => *span,
         }
     }
 }
@@ -94,7 +94,7 @@ impl FormatArguments {
         }
         if !matches!(arg.kind, FormatArgumentKind::Captured(..)) {
             // This is an explicit argument.
-            // Make sure that all arguments so far are explcit.
+            // Make sure that all arguments so far are explicit.
             assert_eq!(
                 self.num_explicit_args,
                 self.arguments.len(),
@@ -561,7 +561,6 @@ pub fn noop_visit_generic_args<T: MutVisitor>(generic_args: &mut GenericArgs, vi
     match generic_args {
         GenericArgs::AngleBracketed(data) => vis.visit_angle_bracketed_parameter_data(data),
         GenericArgs::Parenthesized(data) => vis.visit_parenthesized_parameter_data(data),
-        GenericArgs::ReturnTypeNotation(_span) => {}
     }
 }
 
@@ -482,7 +482,6 @@ where
             walk_list!(visitor, visit_ty, &data.inputs);
             walk_fn_ret_ty(visitor, &data.output);
         }
-        GenericArgs::ReturnTypeNotation(_span) => {}
     }
 }
 
@@ -137,7 +137,7 @@ pub struct AsyncNonMoveClosureNotSupported {
 
 #[derive(Diagnostic, Clone, Copy)]
 #[diag(ast_lowering_functional_record_update_destructuring_assignment)]
-pub struct FunctionalRecordUpdateDestructuringAssignemnt {
+pub struct FunctionalRecordUpdateDestructuringAssignment {
     #[primary_span]
     #[suggestion(code = "", applicability = "machine-applicable")]
     pub span: Span,
@@ -353,13 +353,7 @@ pub enum BadReturnTypeNotation {
     #[diag(ast_lowering_bad_return_type_notation_inputs)]
     Inputs {
         #[primary_span]
-        #[suggestion(code = "(..)", applicability = "maybe-incorrect")]
-        span: Span,
-    },
-    #[diag(ast_lowering_bad_return_type_notation_needs_dots)]
-    NeedsDots {
-        #[primary_span]
-        #[suggestion(code = "(..)", applicability = "maybe-incorrect")]
+        #[suggestion(code = "()", applicability = "maybe-incorrect")]
         span: Span,
     },
     #[diag(ast_lowering_bad_return_type_notation_output)]
@@ -1,6 +1,6 @@
 use super::errors::{
     AsyncGeneratorsNotSupported, AsyncNonMoveClosureNotSupported, AwaitOnlyInAsyncFnAndBlocks,
-    BaseExpressionDoubleDot, ClosureCannotBeStatic, FunctionalRecordUpdateDestructuringAssignemnt,
+    BaseExpressionDoubleDot, ClosureCannotBeStatic, FunctionalRecordUpdateDestructuringAssignment,
     GeneratorTooManyParameters, InclusiveRangeWithNoEnd, NotSupportedForLifetimeBinderAsyncClosure,
     UnderscoreExprLhsAssign,
 };
@@ -434,7 +434,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
         // `if let pat = val` or `if foo && let pat = val`, as we _do_ want `val` to live beyond the
         // condition in this case.
         //
-        // In order to mantain the drop behavior for the non `let` parts of the condition,
+        // In order to maintain the drop behavior for the non `let` parts of the condition,
         // we still wrap them in terminating scopes, e.g. `if foo && let pat = val` essentially
         // gets transformed into `if { let _t = foo; _t } && let pat = val`
         match &cond.kind {
@@ -1232,7 +1232,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
                 );
                 let fields_omitted = match &se.rest {
                     StructRest::Base(e) => {
-                        self.tcx.sess.emit_err(FunctionalRecordUpdateDestructuringAssignemnt {
+                        self.tcx.sess.emit_err(FunctionalRecordUpdateDestructuringAssignment {
                             span: e.span,
                         });
                         true
@@ -987,15 +987,22 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
             GenericArgs::AngleBracketed(data) => {
                 self.lower_angle_bracketed_parameter_data(data, ParamMode::Explicit, itctx).0
             }
-            &GenericArgs::ReturnTypeNotation(span) => GenericArgsCtor {
-                args: Default::default(),
-                bindings: &[],
-                parenthesized: hir::GenericArgsParentheses::ReturnTypeNotation,
-                span,
-            },
             GenericArgs::Parenthesized(data) => {
-                if let Some(start_char) = constraint.ident.as_str().chars().next()
-                    && start_char.is_ascii_lowercase()
+                if data.inputs.is_empty() && matches!(data.output, FnRetTy::Default(..)) {
+                    let parenthesized = if self.tcx.features().return_type_notation {
+                        hir::GenericArgsParentheses::ReturnTypeNotation
+                    } else {
+                        self.emit_bad_parenthesized_trait_in_assoc_ty(data);
+                        hir::GenericArgsParentheses::No
+                    };
+                    GenericArgsCtor {
+                        args: Default::default(),
+                        bindings: &[],
+                        parenthesized,
+                        span: data.inputs_span,
+                    }
+                } else if let Some(first_char) = constraint.ident.as_str().chars().next()
+                    && first_char.is_ascii_lowercase()
                 {
                     let mut err = if !data.inputs.is_empty() {
                         self.tcx.sess.create_err(errors::BadReturnTypeNotation::Inputs {
@@ -1006,9 +1013,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
                             span: data.inputs_span.shrink_to_hi().to(ty.span),
                         })
                     } else {
-                        self.tcx.sess.create_err(errors::BadReturnTypeNotation::NeedsDots {
-                            span: data.inputs_span,
-                        })
+                        unreachable!("inputs are empty and return type is not provided")
                     };
                     if !self.tcx.features().return_type_notation
                         && self.tcx.sess.is_nightly_build()
@@ -13,7 +13,6 @@ use rustc_span::symbol::{kw, sym, Ident};
 use rustc_span::{BytePos, Span, DUMMY_SP};
 
 use smallvec::{smallvec, SmallVec};
-use thin_vec::ThinVec;
 
 impl<'a, 'hir> LoweringContext<'a, 'hir> {
     #[instrument(level = "trace", skip(self))]
@@ -219,18 +218,6 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
                     )
                 }
             },
-            &GenericArgs::ReturnTypeNotation(span) => {
-                self.tcx.sess.emit_err(GenericTypeWithParentheses { span, sub: None });
-                (
-                    self.lower_angle_bracketed_parameter_data(
-                        &AngleBracketedArgs { span, args: ThinVec::default() },
-                        param_mode,
-                        itctx,
-                    )
-                    .0,
-                    false,
-                )
-            }
         }
     } else {
         (
@@ -1080,7 +1080,6 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
                     self.with_impl_trait(None, |this| this.visit_ty(ty));
                 }
             }
-            GenericArgs::ReturnTypeNotation(_span) => {}
         }
     }
 
@@ -1391,7 +1390,6 @@ fn deny_equality_constraints(
                 match &mut assoc_path.segments[len].args {
                     Some(args) => match args.deref_mut() {
                         GenericArgs::Parenthesized(_) => continue,
-                        GenericArgs::ReturnTypeNotation(_span) => continue,
                         GenericArgs::AngleBracketed(args) => {
                             args.args.push(arg);
                         }
@@ -121,24 +121,34 @@ impl<'a> PostExpansionVisitor<'a> {
     }
 
     /// Feature gate `impl Trait` inside `type Alias = $type_expr;`.
-    fn check_impl_trait(&self, ty: &ast::Ty) {
+    fn check_impl_trait(&self, ty: &ast::Ty, in_associated_ty: bool) {
         struct ImplTraitVisitor<'a> {
             vis: &'a PostExpansionVisitor<'a>,
+            in_associated_ty: bool,
         }
         impl Visitor<'_> for ImplTraitVisitor<'_> {
             fn visit_ty(&mut self, ty: &ast::Ty) {
                 if let ast::TyKind::ImplTrait(..) = ty.kind {
-                    gate_feature_post!(
-                        &self.vis,
-                        type_alias_impl_trait,
-                        ty.span,
-                        "`impl Trait` in type aliases is unstable"
-                    );
+                    if self.in_associated_ty {
+                        gate_feature_post!(
+                            &self.vis,
+                            impl_trait_in_assoc_type,
+                            ty.span,
+                            "`impl Trait` in associated types is unstable"
+                        );
+                    } else {
+                        gate_feature_post!(
+                            &self.vis,
+                            type_alias_impl_trait,
+                            ty.span,
+                            "`impl Trait` in type aliases is unstable"
+                        );
+                    }
                 }
                 visit::walk_ty(self, ty);
             }
         }
-        ImplTraitVisitor { vis: self }.visit_ty(ty);
+        ImplTraitVisitor { vis: self, in_associated_ty }.visit_ty(ty);
     }
 
     fn check_late_bound_lifetime_defs(&self, params: &[ast::GenericParam]) {
@@ -294,7 +304,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
             }
 
             ast::ItemKind::TyAlias(box ast::TyAlias { ty: Some(ty), .. }) => {
-                self.check_impl_trait(&ty)
+                self.check_impl_trait(&ty, false)
             }
 
             _ => {}
@@ -485,20 +495,23 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
 
     fn visit_assoc_constraint(&mut self, constraint: &'a AssocConstraint) {
         if let AssocConstraintKind::Bound { .. } = constraint.kind {
-            if let Some(args) = constraint.gen_args.as_ref()
-                && matches!(
-                    args,
-                    ast::GenericArgs::ReturnTypeNotation(..)
-                )
+            if let Some(ast::GenericArgs::Parenthesized(args)) = constraint.gen_args.as_ref()
+                && args.inputs.is_empty()
+                && matches!(args.output, ast::FnRetTy::Default(..))
             {
-                // RTN is gated below with a `gate_all`.
+                gate_feature_post!(
+                    &self,
+                    return_type_notation,
+                    constraint.span,
+                    "return type notation is experimental"
+                );
             } else {
                 gate_feature_post!(
                     &self,
                     associated_type_bounds,
                     constraint.span,
                     "associated type bounds are unstable"
-                )
+                );
             }
         }
         visit::walk_assoc_constraint(self, constraint)
@@ -517,7 +530,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
                 );
             }
             if let Some(ty) = ty {
-                self.check_impl_trait(ty);
+                self.check_impl_trait(ty, true);
             }
             false
         }
@@ -589,7 +602,6 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session) {
     gate_all!(yeet_expr, "`do yeet` expression is experimental");
     gate_all!(dyn_star, "`dyn*` trait objects are experimental");
     gate_all!(const_closures, "const closures are experimental");
-    gate_all!(return_type_notation, "return type notation is experimental");
 
     // All uses of `gate_all!` below this point were added in #65742,
     // and subsequently disabled (with the non-early gating readded).
@@ -605,6 +617,7 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session) {
 
     gate_all!(trait_alias, "trait aliases are experimental");
    gate_all!(associated_type_bounds, "associated type bounds are unstable");
+    gate_all!(return_type_notation, "return type notation is experimental");
     gate_all!(decl_macro, "`macro` is experimental");
     gate_all!(box_patterns, "box pattern syntax is experimental");
     gate_all!(exclusive_range_pattern, "exclusive range pattern syntax is experimental");
@@ -936,10 +936,6 @@ impl<'a> PrintState<'a> for State<'a> {
                 self.word(")");
                 self.print_fn_ret_ty(&data.output);
             }
-
-            ast::GenericArgs::ReturnTypeNotation(_span) => {
-                self.word("(..)");
-            }
         }
     }
 }
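For illustration only (this sketch is not part of the commit's diff): with #110203, a return-type-notation bound is spelled with empty parentheses rather than `(..)`, which is why the `(..)` printing and the `NeedsDots` diagnostic above disappear. A minimal sketch, assuming a nightly toolchain from around this merge with the unstable `async_fn_in_trait` and `return_type_notation` features; exact syntax may have changed since:

```rust
// Sketch only, not part of the diff; feature names and syntax are unstable.
#![allow(incomplete_features)]
#![feature(async_fn_in_trait)]
#![feature(return_type_notation)]

trait Database {
    async fn fetch(&self) -> String;
}

fn requires_send<T: Send>(_: T) {}

// After #110203 the bound is written `fetch(): Send` (empty parentheses),
// not the earlier `fetch(..): Send`; it constrains the future returned by
// `fetch`, which the lowering above now expresses via
// `GenericArgsParentheses::ReturnTypeNotation` for empty parenthesized args.
fn spawn_fetch<D>(db: D)
where
    D: Database<fetch(): Send> + Send + 'static,
{
    requires_send(async move { db.fetch().await });
}

fn main() {}
```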
@@ -13,7 +13,7 @@ use crate::{
 /// The construct graph organizes the constraints by their end-points.
 /// It can be used to view a `R1: R2` constraint as either an edge `R1
 /// -> R2` or `R2 -> R1` depending on the direction type `D`.
-pub(crate) struct ConstraintGraph<D: ConstraintGraphDirecton> {
+pub(crate) struct ConstraintGraph<D: ConstraintGraphDirection> {
     _direction: D,
     first_constraints: IndexVec<RegionVid, Option<OutlivesConstraintIndex>>,
     next_constraints: IndexVec<OutlivesConstraintIndex, Option<OutlivesConstraintIndex>>,
@@ -25,7 +25,7 @@ pub(crate) type ReverseConstraintGraph = ConstraintGraph<Reverse>;
 
 /// Marker trait that controls whether a `R1: R2` constraint
 /// represents an edge `R1 -> R2` or `R2 -> R1`.
-pub(crate) trait ConstraintGraphDirecton: Copy + 'static {
+pub(crate) trait ConstraintGraphDirection: Copy + 'static {
     fn start_region(c: &OutlivesConstraint<'_>) -> RegionVid;
     fn end_region(c: &OutlivesConstraint<'_>) -> RegionVid;
     fn is_normal() -> bool;
@@ -38,7 +38,7 @@ pub(crate) trait ConstraintGraphDirecton: Copy + 'static {
 #[derive(Copy, Clone, Debug)]
 pub(crate) struct Normal;
 
-impl ConstraintGraphDirecton for Normal {
+impl ConstraintGraphDirection for Normal {
     fn start_region(c: &OutlivesConstraint<'_>) -> RegionVid {
         c.sup
     }
@@ -59,7 +59,7 @@ impl ConstraintGraphDirecton for Normal {
 #[derive(Copy, Clone, Debug)]
 pub(crate) struct Reverse;
 
-impl ConstraintGraphDirecton for Reverse {
+impl ConstraintGraphDirection for Reverse {
     fn start_region(c: &OutlivesConstraint<'_>) -> RegionVid {
         c.sub
     }
@@ -73,7 +73,7 @@ impl ConstraintGraphDirecton for Reverse {
     }
 }
 
-impl<D: ConstraintGraphDirecton> ConstraintGraph<D> {
+impl<D: ConstraintGraphDirection> ConstraintGraph<D> {
     /// Creates a "dependency graph" where each region constraint `R1:
     /// R2` is treated as an edge `R1 -> R2`. We use this graph to
     /// construct SCCs for region inference but also for error
@@ -133,7 +133,7 @@ impl<D: ConstraintGraphDirecton> ConstraintGraph<D> {
     }
 }
 
-pub(crate) struct Edges<'s, 'tcx, D: ConstraintGraphDirecton> {
+pub(crate) struct Edges<'s, 'tcx, D: ConstraintGraphDirection> {
     graph: &'s ConstraintGraph<D>,
     constraints: &'s OutlivesConstraintSet<'tcx>,
     pointer: Option<OutlivesConstraintIndex>,
@@ -141,7 +141,7 @@ pub(crate) struct Edges<'s, 'tcx, D: ConstraintGraphDirecton> {
     static_region: RegionVid,
 }
 
-impl<'s, 'tcx, D: ConstraintGraphDirecton> Iterator for Edges<'s, 'tcx, D> {
+impl<'s, 'tcx, D: ConstraintGraphDirection> Iterator for Edges<'s, 'tcx, D> {
     type Item = OutlivesConstraint<'tcx>;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -174,13 +174,13 @@ impl<'s, 'tcx, D: ConstraintGraphDirecton> Iterator for Edges<'s, 'tcx, D> {
 /// This struct brings together a constraint set and a (normal, not
 /// reverse) constraint graph. It implements the graph traits and is
 /// usd for doing the SCC computation.
-pub(crate) struct RegionGraph<'s, 'tcx, D: ConstraintGraphDirecton> {
+pub(crate) struct RegionGraph<'s, 'tcx, D: ConstraintGraphDirection> {
     set: &'s OutlivesConstraintSet<'tcx>,
     constraint_graph: &'s ConstraintGraph<D>,
     static_region: RegionVid,
 }
 
-impl<'s, 'tcx, D: ConstraintGraphDirecton> RegionGraph<'s, 'tcx, D> {
+impl<'s, 'tcx, D: ConstraintGraphDirection> RegionGraph<'s, 'tcx, D> {
     /// Creates a "dependency graph" where each region constraint `R1:
     /// R2` is treated as an edge `R1 -> R2`. We use this graph to
     /// construct SCCs for region inference but also for error
@@ -202,11 +202,11 @@ impl<'s, 'tcx, D: ConstraintGraphDirecton> RegionGraph<'s, 'tcx, D> {
     }
 }
 
-pub(crate) struct Successors<'s, 'tcx, D: ConstraintGraphDirecton> {
+pub(crate) struct Successors<'s, 'tcx, D: ConstraintGraphDirection> {
     edges: Edges<'s, 'tcx, D>,
 }
 
-impl<'s, 'tcx, D: ConstraintGraphDirecton> Iterator for Successors<'s, 'tcx, D> {
+impl<'s, 'tcx, D: ConstraintGraphDirection> Iterator for Successors<'s, 'tcx, D> {
     type Item = RegionVid;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -214,23 +214,25 @@ impl<'s, 'tcx, D: ConstraintGraphDirecton> Iterator for Successors<'s, 'tcx, D>
     }
 }
 
-impl<'s, 'tcx, D: ConstraintGraphDirecton> graph::DirectedGraph for RegionGraph<'s, 'tcx, D> {
+impl<'s, 'tcx, D: ConstraintGraphDirection> graph::DirectedGraph for RegionGraph<'s, 'tcx, D> {
     type Node = RegionVid;
 }
 
-impl<'s, 'tcx, D: ConstraintGraphDirecton> graph::WithNumNodes for RegionGraph<'s, 'tcx, D> {
+impl<'s, 'tcx, D: ConstraintGraphDirection> graph::WithNumNodes for RegionGraph<'s, 'tcx, D> {
     fn num_nodes(&self) -> usize {
         self.constraint_graph.first_constraints.len()
     }
 }
 
-impl<'s, 'tcx, D: ConstraintGraphDirecton> graph::WithSuccessors for RegionGraph<'s, 'tcx, D> {
+impl<'s, 'tcx, D: ConstraintGraphDirection> graph::WithSuccessors for RegionGraph<'s, 'tcx, D> {
     fn successors(&self, node: Self::Node) -> <Self as graph::GraphSuccessors<'_>>::Iter {
         self.outgoing_regions(node)
     }
 }
 
-impl<'s, 'tcx, D: ConstraintGraphDirecton> graph::GraphSuccessors<'_> for RegionGraph<'s, 'tcx, D> {
+impl<'s, 'tcx, D: ConstraintGraphDirection> graph::GraphSuccessors<'_>
+    for RegionGraph<'s, 'tcx, D>
+{
     type Item = RegionVid;
     type Iter = Successors<'s, 'tcx, D>;
 }
@@ -51,12 +51,10 @@ impl RegionCtxt {
     /// Used to determine the representative of a component in the strongly connected
     /// constraint graph
     pub(crate) fn preference_value(self) -> usize {
-        let _anon = Symbol::intern("anon");
-
         match self {
             RegionCtxt::Unknown => 1,
             RegionCtxt::Existential(None) => 2,
-            RegionCtxt::Existential(Some(_anon)) | RegionCtxt::Free(_anon) => 2,
+            RegionCtxt::Existential(Some(_)) | RegionCtxt::Free(_) => 2,
             RegionCtxt::Location(_) => 3,
             RegionCtxt::TyContext(_) => 4,
             _ => 5,
@@ -35,6 +35,7 @@ use rustc_middle::ty::{
     OpaqueHiddenType, OpaqueTypeKey, RegionVid, Ty, TyCtxt, UserType, UserTypeAnnotationIndex,
 };
 use rustc_span::def_id::CRATE_DEF_ID;
+use rustc_span::symbol::sym;
 use rustc_span::{Span, DUMMY_SP};
 use rustc_target::abi::{FieldIdx, FIRST_VARIANT};
 use rustc_trait_selection::traits::query::type_op::custom::scrape_region_constraints;
@@ -1338,18 +1339,13 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
         };
         let (sig, map) = tcx.replace_late_bound_regions(sig, |br| {
             use crate::renumber::{BoundRegionInfo, RegionCtxt};
-            use rustc_span::Symbol;
 
             let region_ctxt_fn = || {
                 let reg_info = match br.kind {
                     ty::BoundRegionKind::BrAnon(Some(span)) => BoundRegionInfo::Span(span),
-                    ty::BoundRegionKind::BrAnon(..) => {
-                        BoundRegionInfo::Name(Symbol::intern("anon"))
-                    }
+                    ty::BoundRegionKind::BrAnon(..) => BoundRegionInfo::Name(sym::anon),
                     ty::BoundRegionKind::BrNamed(_, name) => BoundRegionInfo::Name(name),
-                    ty::BoundRegionKind::BrEnv => {
-                        BoundRegionInfo::Name(Symbol::intern("env"))
-                    }
+                    ty::BoundRegionKind::BrEnv => BoundRegionInfo::Name(sym::env),
                 };
 
                 RegionCtxt::LateBound(reg_info)
@@ -2600,7 +2596,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                 self.implicit_region_bound,
                 self.param_env,
                 location.to_locations(),
-                DUMMY_SP, // irrelevant; will be overrided.
+                DUMMY_SP, // irrelevant; will be overridden.
                 ConstraintCategory::Boring, // same as above.
                 &mut self.borrowck_context.constraints,
             )
@@ -4,6 +4,7 @@ use rustc_infer::traits::PredicateObligations;
 use rustc_middle::mir::ConstraintCategory;
 use rustc_middle::ty::relate::TypeRelation;
 use rustc_middle::ty::{self, Ty};
+use rustc_span::symbol::sym;
 use rustc_span::{Span, Symbol};
 use rustc_trait_selection::traits::query::Fallible;
 
@@ -125,9 +126,9 @@ impl<'tcx> TypeRelatingDelegate<'tcx> for NllTypeRelatingDelegate<'_, '_, 'tcx>
 
         let reg_info = match placeholder.bound.kind {
             ty::BoundRegionKind::BrAnon(Some(span)) => BoundRegionInfo::Span(span),
-            ty::BoundRegionKind::BrAnon(..) => BoundRegionInfo::Name(Symbol::intern("anon")),
+            ty::BoundRegionKind::BrAnon(..) => BoundRegionInfo::Name(sym::anon),
             ty::BoundRegionKind::BrNamed(_, name) => BoundRegionInfo::Name(name),
-            ty::BoundRegionKind::BrEnv => BoundRegionInfo::Name(Symbol::intern("env")),
+            ty::BoundRegionKind::BrEnv => BoundRegionInfo::Name(sym::env),
         };
 
         let reg_var =
@@ -24,6 +24,7 @@ use rustc_infer::infer::NllRegionVariableOrigin;
 use rustc_middle::ty::fold::TypeFoldable;
 use rustc_middle::ty::{self, InlineConstSubsts, InlineConstSubstsParts, RegionVid, Ty, TyCtxt};
 use rustc_middle::ty::{InternalSubsts, SubstsRef};
+use rustc_span::symbol::{kw, sym};
 use rustc_span::Symbol;
 use std::iter;
 
@@ -404,10 +405,8 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
         assert_eq!(FIRST_GLOBAL_INDEX, self.infcx.num_region_vars());
 
         // Create the "global" region that is always free in all contexts: 'static.
-        let fr_static = self
-            .infcx
-            .next_nll_region_var(FR, || RegionCtxt::Free(Symbol::intern("static")))
-            .to_region_vid();
+        let fr_static =
+            self.infcx.next_nll_region_var(FR, || RegionCtxt::Free(kw::Static)).to_region_vid();
 
         // We've now added all the global regions. The next ones we
         // add will be external.
@@ -440,11 +439,7 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
             debug!(?r);
             if !indices.indices.contains_key(&r) {
                 let region_vid = {
-                    let name = match r.get_name() {
-                        Some(name) => name,
-                        _ => Symbol::intern("anon"),
-                    };
-
+                    let name = r.get_name_or_anon();
                     self.infcx.next_nll_region_var(FR, || {
                         RegionCtxt::LateBound(BoundRegionInfo::Name(name))
                     })
@@ -478,11 +473,7 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
             debug!(?r);
             if !indices.indices.contains_key(&r) {
                 let region_vid = {
-                    let name = match r.get_name() {
-                        Some(name) => name,
-                        _ => Symbol::intern("anon"),
-                    };
-
+                    let name = r.get_name_or_anon();
                     self.infcx.next_nll_region_var(FR, || {
                         RegionCtxt::LateBound(BoundRegionInfo::Name(name))
                     })
@@ -768,15 +759,10 @@ impl<'cx, 'tcx> InferCtxtExt<'tcx> for BorrowckInferCtxt<'cx, 'tcx> {
         T: TypeFoldable<TyCtxt<'tcx>>,
     {
         self.infcx.tcx.fold_regions(value, |region, _depth| {
-            let name = match region.get_name() {
-                Some(name) => name,
-                _ => Symbol::intern("anon"),
-            };
+            let name = region.get_name_or_anon();
             debug!(?region, ?name);
 
-            let reg_var = self.next_nll_region_var(origin, || RegionCtxt::Free(name));
-
-            reg_var
+            self.next_nll_region_var(origin, || RegionCtxt::Free(name))
         })
     }
 
@@ -797,7 +783,7 @@ impl<'cx, 'tcx> InferCtxtExt<'tcx> for BorrowckInferCtxt<'cx, 'tcx> {
         let region_vid = {
             let name = match br.kind.get_name() {
                 Some(name) => name,
-                _ => Symbol::intern("anon"),
+                _ => sym::anon,
             };
 
             self.next_nll_region_var(origin, || RegionCtxt::Bound(BoundRegionInfo::Name(name)))
@@ -829,11 +815,7 @@ impl<'cx, 'tcx> InferCtxtExt<'tcx> for BorrowckInferCtxt<'cx, 'tcx> {
             debug!(?r);
             if !indices.indices.contains_key(&r) {
                 let region_vid = {
-                    let name = match r.get_name() {
-                        Some(name) => name,
-                        _ => Symbol::intern("anon"),
-                    };
-
+                    let name = r.get_name_or_anon();
                     self.next_nll_region_var(FR, || {
                         RegionCtxt::LateBound(BoundRegionInfo::Name(name))
                     })
@@ -855,11 +837,7 @@ impl<'cx, 'tcx> InferCtxtExt<'tcx> for BorrowckInferCtxt<'cx, 'tcx> {
             debug!(?r);
             if !indices.indices.contains_key(&r) {
                 let region_vid = {
-                    let name = match r.get_name() {
-                        Some(name) => name,
-                        _ => Symbol::intern("anon"),
-                    };
-
+                    let name = r.get_name_or_anon();
                     self.next_nll_region_var(FR, || {
                         RegionCtxt::LateBound(BoundRegionInfo::Name(name))
                    })
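The `Symbol::intern("anon")` to `sym::anon` and `kw::Static` changes above come from #110175 (Symbol cleanups). As a rough, hand-written sketch of the underlying idea (the types below are simplified stand-ins, not rustc's own interner): interning at runtime hashes the string and does a table lookup, while a preinterned symbol is effectively a constant index known ahead of time.

```rust
// Sketch only, not part of the diff; `Symbol`, `Interner`, and `SYM_ANON`
// here are illustrative stand-ins for rustc's interner and `sym::*` items.
use std::collections::HashMap;

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct Symbol(u32);

struct Interner {
    names: Vec<String>,
    lookup: HashMap<String, Symbol>,
}

impl Interner {
    // Preintern a fixed list of names so they get stable, known indices.
    fn prefill(names: &[&str]) -> Self {
        let mut this = Interner { names: Vec::new(), lookup: HashMap::new() };
        for &name in names {
            this.intern(name);
        }
        this
    }

    // Runtime interning: hash the string and look it up (or insert it).
    fn intern(&mut self, name: &str) -> Symbol {
        if let Some(&sym) = self.lookup.get(name) {
            return sym;
        }
        let sym = Symbol(self.names.len() as u32);
        self.names.push(name.to_string());
        self.lookup.insert(name.to_string(), sym);
        sym
    }
}

// A preinterned symbol is just a constant, no hashing or locking needed.
const SYM_ANON: Symbol = Symbol(0);

fn main() {
    let mut interner = Interner::prefill(&["anon", "env", "static"]);
    assert_eq!(interner.intern("anon"), SYM_ANON);
}
```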
@@ -42,7 +42,7 @@ struct MacroInput {
     fmtstr: P<Expr>,
     args: FormatArguments,
     /// Whether the first argument was a string literal or a result from eager macro expansion.
-    /// If it's not a string literal, we disallow implicit arugment capturing.
+    /// If it's not a string literal, we disallow implicit argument capturing.
     ///
     /// This does not correspond to whether we can treat spans to the literal normally, as the whole
     /// invocation might be the result of another macro expansion, in which case this flag may still be true.
@@ -141,7 +141,7 @@ codegen_ssa_msvc_missing_linker = the msvc targets depend on the msvc linker but
 
 codegen_ssa_check_installed_visual_studio = please ensure that Visual Studio 2017 or later, or Build Tools for Visual Studio were installed with the Visual C++ option.
 
-codegen_ssa_unsufficient_vs_code_product = VS Code is a different product, and is not sufficient.
+codegen_ssa_insufficient_vs_code_product = VS Code is a different product, and is not sufficient.
 
 codegen_ssa_processing_dymutil_failed = processing debug info with `dsymutil` failed: {$status}
     .note = {$output}
@@ -923,7 +923,7 @@ fn link_natively<'a>(
             if sess.target.is_like_msvc && linker_not_found {
                 sess.emit_note(errors::MsvcMissingLinker);
                 sess.emit_note(errors::CheckInstalledVisualStudio);
-                sess.emit_note(errors::UnsufficientVSCodeProduct);
+                sess.emit_note(errors::InsufficientVSCodeProduct);
             }
             sess.abort_if_errors();
         }
@@ -405,8 +405,8 @@ pub struct MsvcMissingLinker;
 pub struct CheckInstalledVisualStudio;
 
 #[derive(Diagnostic)]
-#[diag(codegen_ssa_unsufficient_vs_code_product)]
-pub struct UnsufficientVSCodeProduct;
+#[diag(codegen_ssa_insufficient_vs_code_product)]
+pub struct InsufficientVSCodeProduct;
 
 #[derive(Diagnostic)]
 #[diag(codegen_ssa_processing_dymutil_failed)]
@@ -205,7 +205,7 @@ pub(crate) fn turn_into_const_value<'tcx>(
     let cid = key.value;
     let def_id = cid.instance.def.def_id();
     let is_static = tcx.is_static(def_id);
-    // This is just accessing an already computed constant, so no need to check alginment here.
+    // This is just accessing an already computed constant, so no need to check alignment here.
     let ecx = mk_eval_cx(
         tcx,
         tcx.def_span(key.value.instance.def_id()),
@@ -135,4 +135,4 @@ expand_proc_macro_panicked =
     .help = message: {$message}
 
 expand_proc_macro_derive_tokens =
-    proc-macro derive produced unparseable tokens
+    proc-macro derive produced unparsable tokens
@@ -66,7 +66,12 @@ pub(super) fn failed_to_match_macro<'cx>(
         && (matches!(expected_token.kind, TokenKind::Interpolated(_))
             || matches!(token.kind, TokenKind::Interpolated(_)))
     {
-        err.note("captured metavariables except for `$tt`, `$ident` and `$lifetime` cannot be compared to other tokens");
+        err.note("captured metavariables except for `:tt`, `:ident` and `:lifetime` cannot be compared to other tokens");
+        err.note("see <https://doc.rust-lang.org/nightly/reference/macros-by-example.html#forwarding-a-matched-fragment> for more information");
+
+        if !def_span.is_dummy() && !cx.source_map().is_imported(def_span) {
+            err.help("try using `:tt` instead in the macro definition");
+        }
     }
 
     // Check whether there's a missing comma in this macro call, like `println!("{}" a);`
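For illustration only (not part of the diff): the improved note from #110222 is about forwarding a matched fragment to another macro. A small self-contained example of the behavior the note describes, which compiles on stable Rust:

```rust
// Sketch only, not part of the diff.
macro_rules! inner {
    (0) => { "zero" };
    ($e:expr) => { "something else" };
}

macro_rules! outer {
    ($e:expr) => {
        // `$e` is forwarded as an opaque `expr` fragment, so `inner!` never
        // sees the raw `0` token and the `(0)` rule cannot match; capturing
        // with `$e:tt` in `outer!` would forward the bare token instead.
        inner!($e)
    };
}

fn main() {
    // Prints "something else", not "zero", because of fragment forwarding.
    println!("{}", outer!(0));
}
```

If `inner!` had no fallback rule, the same forwarding would make the invocation fail to match at all, which is the situation where the new note and the `:tt` help above are emitted.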
@@ -309,7 +309,7 @@ declare_features! (
     (active, associated_type_defaults, "1.2.0", Some(29661), None),
     /// Allows `async || body` closures.
     (active, async_closure, "1.37.0", Some(62290), None),
-    /// Alows async functions to be declared, implemented, and used in traits.
+    /// Allows async functions to be declared, implemented, and used in traits.
     (incomplete, async_fn_in_trait, "1.66.0", Some(91611), None),
     /// Allows `extern "C-unwind" fn` to enable unwinding across ABI boundaries.
     (active, c_unwind, "1.52.0", Some(74990), None),
@@ -416,6 +416,8 @@ declare_features! (
     (active, half_open_range_patterns_in_slices, "1.66.0", Some(67264), None),
     /// Allows `if let` guard in match arms.
     (active, if_let_guard, "1.47.0", Some(51114), None),
+    /// Allows `impl Trait` to be used inside associated types (RFC 2515).
+    (active, impl_trait_in_assoc_type, "CURRENT_RUSTC_VERSION", Some(63063), None),
     /// Allows `impl Trait` as output type in `Fn` traits in return position of functions.
     (active, impl_trait_in_fn_trait_return, "1.64.0", Some(99697), None),
     /// Allows referencing `Self` and projections in impl-trait.
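For illustration only (not part of the diff): #110237 splits `impl Trait` in associated types out of `type_alias_impl_trait` into the new `impl_trait_in_assoc_type` gate registered above. A minimal sketch of what the new gate covers, assuming a nightly toolchain where this feature exists:

```rust
// Sketch only, not part of the diff; the feature is unstable.
#![feature(impl_trait_in_assoc_type)]

trait Countable {
    type Counter: Iterator<Item = u32>;
    fn counter(&self) -> Self::Counter;
}

struct UpTo(u32);

impl Countable for UpTo {
    // `impl Trait` in an associated type is now gated separately from
    // `impl Trait` in a free `type` alias (`type_alias_impl_trait`).
    type Counter = impl Iterator<Item = u32>;

    fn counter(&self) -> Self::Counter {
        0..self.0
    }
}

fn main() {
    let total: u32 = UpTo(5).counter().sum();
    assert_eq!(total, 0 + 1 + 2 + 3 + 4);
}
```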
@@ -58,7 +58,7 @@ impl<'tcx> Bounds<'tcx> {
     pub fn push_sized(&mut self, tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, span: Span) {
         let sized_def_id = tcx.require_lang_item(LangItem::Sized, Some(span));
         let trait_ref = ty::Binder::dummy(tcx.mk_trait_ref(sized_def_id, [ty]));
-        // Preferrable to put this obligation first, since we report better errors for sized ambiguity.
+        // Preferable to put this obligation first, since we report better errors for sized ambiguity.
         self.predicates.insert(0, (trait_ref.without_const().to_predicate(tcx), span));
     }
 
@@ -308,7 +308,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         let rcvr_ty = self.node_ty(rcvr.hir_id);
         // Get the evaluated type *after* calling the method call, so that the influence
         // of the arguments can be reflected in the receiver type. The receiver
-        // expression has the type *before* theis analysis is done.
+        // expression has the type *before* this analysis is done.
         let ty = match self.lookup_probe_for_diagnostic(
             segment.ident,
             rcvr_ty,
@@ -120,7 +120,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         ty
     }
 
-    pub(super) fn check_expr_coercable_to_type(
+    pub(super) fn check_expr_coercible_to_type(
         &self,
         expr: &'tcx hir::Expr<'tcx>,
         expected: Ty<'tcx>,
@@ -1128,7 +1128,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             }
         };
 
-        // This is (basically) inlined `check_expr_coercable_to_type`, but we want
+        // This is (basically) inlined `check_expr_coercible_to_type`, but we want
         // to suggest an additional fixup here in `suggest_deref_binop`.
         let rhs_ty = self.check_expr_with_hint(&rhs, lhs_ty);
         if let (_, Some(mut diag)) =
@@ -1401,7 +1401,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 
         let (element_ty, t) = match uty {
             Some(uty) => {
-                self.check_expr_coercable_to_type(&element, uty, None);
+                self.check_expr_coercible_to_type(&element, uty, None);
                 (uty, uty)
             }
             None => {
@@ -1478,7 +1478,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         let elt_ts_iter = elts.iter().enumerate().map(|(i, e)| match flds {
             Some(fs) if i < fs.len() => {
                 let ety = fs[i];
-                self.check_expr_coercable_to_type(&e, ety, None);
+                self.check_expr_coercible_to_type(&e, ety, None);
                 ety
             }
             _ => self.check_expr_with_expectation(&e, NoExpectation),
@@ -2869,7 +2869,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
     ) -> Ty<'tcx> {
         match self.resume_yield_tys {
             Some((resume_ty, yield_ty)) => {
-                self.check_expr_coercable_to_type(&value, yield_ty, None);
+                self.check_expr_coercible_to_type(&value, yield_ty, None);
 
                 resume_ty
             }
@@ -2878,7 +2878,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             // information. Hence, we check the source of the yield expression here and check its
             // value's type against `()` (this check should always hold).
             None if src.is_await() => {
-                self.check_expr_coercable_to_type(&value, self.tcx.mk_unit(), None);
+                self.check_expr_coercible_to_type(&value, self.tcx.mk_unit(), None);
                 self.tcx.mk_unit()
             }
             _ => {
@@ -466,7 +466,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
     /// obligation. Hence we refine the `expr` "outwards-in" and bail at the first kind of expression/impl we don't recognize.
     ///
     /// This function returns a `Result<&Expr, &Expr>` - either way, it returns the `Expr` whose span should be
-    /// reported as an error. If it is `Ok`, then it means it refined successfull. If it is `Err`, then it may be
+    /// reported as an error. If it is `Ok`, then it means it refined successful. If it is `Err`, then it may be
     /// only a partial success - but it cannot be refined even further.
     fn blame_specific_expr_if_possible_for_derived_predicate_obligation(
         &self,
@@ -534,7 +534,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
     /// - in_ty: `(Option<Vec<T>, bool)`
     /// we would drill until we arrive at `vec![1, 2, 3]`.
     ///
-    /// If successful, we return `Ok(refined_expr)`. If unsuccesful, we return `Err(partially_refined_expr`),
+    /// If successful, we return `Ok(refined_expr)`. If unsuccessful, we return `Err(partially_refined_expr`),
     /// which will go as far as possible. For example, given `(foo(), false)` instead, we would drill to
     /// `foo()` and then return `Err("foo()")`.
     ///
@@ -1413,7 +1413,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             self.demand_eqtype(init.span, local_ty, init_ty);
             init_ty
         } else {
-            self.check_expr_coercable_to_type(init, local_ty, None)
+            self.check_expr_coercible_to_type(init, local_ty, None)
         }
     }
 
@@ -280,7 +280,7 @@ fn typeck_with_fallback<'tcx>(
             // Gather locals in statics (because of block expressions).
             GatherLocalsVisitor::new(&fcx).visit_body(body);
 
-            fcx.check_expr_coercable_to_type(&body.value, expected_type, None);
+            fcx.check_expr_coercible_to_type(&body.value, expected_type, None);
 
             fcx.write_ty(id, expected_type);
         };
@@ -300,7 +300,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         };
 
         // We could pass the file for long types into these two, but it isn't strictly necessary
-        // given how targetted they are.
+        // given how targeted they are.
         if self.suggest_wrapping_range_with_parens(
             tcx,
             rcvr_ty,
@@ -103,9 +103,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         match BinOpCategory::from(op) {
             BinOpCategory::Shortcircuit => {
                 // && and || are a simple case.
-                self.check_expr_coercable_to_type(lhs_expr, tcx.types.bool, None);
+                self.check_expr_coercible_to_type(lhs_expr, tcx.types.bool, None);
                 let lhs_diverges = self.diverges.get();
-                self.check_expr_coercable_to_type(rhs_expr, tcx.types.bool, None);
+                self.check_expr_coercible_to_type(rhs_expr, tcx.types.bool, None);
 
                 // Depending on the LHS' value, the RHS can never execute.
                 self.diverges.set(lhs_diverges);
@@ -255,7 +255,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         );
 
         // see `NB` above
-        let rhs_ty = self.check_expr_coercable_to_type(rhs_expr, rhs_ty_var, Some(lhs_expr));
+        let rhs_ty = self.check_expr_coercible_to_type(rhs_expr, rhs_ty_var, Some(lhs_expr));
         let rhs_ty = self.resolve_vars_with_obligations(rhs_ty);
 
         let return_ty = match result {
@@ -173,7 +173,7 @@ infer_region_explanation = {$pref_kind ->
 
 infer_outlives_content = lifetime of reference outlives lifetime of borrowed content...
 infer_outlives_bound = lifetime of the source pointer does not outlive lifetime bound of the object type
-infer_fullfill_req_lifetime = the type `{$ty}` does not fulfill the required lifetime
+infer_fulfill_req_lifetime = the type `{$ty}` does not fulfill the required lifetime
 infer_lf_bound_not_satisfied = lifetime bound not satisfied
 infer_borrowed_too_long = a value of type `{$ty}` is borrowed for too long
 infer_ref_longer_than_data = in type `{$ty}`, reference has a longer lifetime than the data it references
@@ -53,7 +53,7 @@ pub struct AnnotationRequired<'a> {
 // Copy of `AnnotationRequired` for E0283
 #[derive(Diagnostic)]
 #[diag(infer_type_annotations_needed, code = "E0283")]
-pub struct AmbigousImpl<'a> {
+pub struct AmbiguousImpl<'a> {
     #[primary_span]
     pub span: Span,
     pub source_kind: &'static str,
@@ -942,8 +942,8 @@ pub struct OutlivesBound<'a> {
 }
 
 #[derive(Diagnostic)]
-#[diag(infer_fullfill_req_lifetime, code = "E0477")]
-pub struct FullfillReqLifetime<'a> {
+#[diag(infer_fulfill_req_lifetime, code = "E0477")]
+pub struct FulfillReqLifetime<'a> {
     #[primary_span]
     pub span: Span,
     pub ty: Ty<'a>,
@@ -1819,7 +1819,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                 // will try to hide in some case such as `async fn`, so
                 // to make an error more use friendly we will
                 // avoid to suggest a mismatch type with a
-                // type that the user usually are not usign
+                // type that the user usually are not using
                 // directly such as `impl Future<Output = u8>`.
                 if !self.tcx.ty_is_opaque_future(found_ty) {
                     diag.note_expected_found_extra(
@@ -1,5 +1,5 @@
 use crate::errors::{
-    AmbigousImpl, AmbigousReturn, AnnotationRequired, InferenceBadError, NeedTypeInfoInGenerator,
+    AmbigousReturn, AmbiguousImpl, AnnotationRequired, InferenceBadError, NeedTypeInfoInGenerator,
     SourceKindMultiSuggestion, SourceKindSubdiag,
 };
 use crate::infer::error_reporting::TypeErrCtxt;
@@ -358,7 +358,7 @@ impl<'tcx> InferCtxt<'tcx> {
                 bad_label,
             }
             .into_diagnostic(&self.tcx.sess.parse_sess.span_diagnostic),
-            TypeAnnotationNeeded::E0283 => AmbigousImpl {
+            TypeAnnotationNeeded::E0283 => AmbiguousImpl {
                 span,
                 source_kind,
                 source_name,
@@ -563,7 +563,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                 bad_label: None,
             }
             .into_diagnostic(&self.tcx.sess.parse_sess.span_diagnostic),
-            TypeAnnotationNeeded::E0283 => AmbigousImpl {
+            TypeAnnotationNeeded::E0283 => AmbiguousImpl {
                 span,
                 source_kind,
                 source_name: &name,
@@ -1,5 +1,5 @@
 use crate::errors::{
-    note_and_explain, FullfillReqLifetime, LfBoundNotSatisfied, OutlivesBound, OutlivesContent,
+    note_and_explain, FulfillReqLifetime, LfBoundNotSatisfied, OutlivesBound, OutlivesContent,
     RefLongerThanData, RegionOriginNote, WhereClauseSuggestions,
 };
 use crate::fluent_generated as fluent;
@@ -176,7 +176,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                 let note = note_and_explain::RegionExplanation::new(
                     self.tcx, sub, opt_span, prefix, suffix,
                 );
-                FullfillReqLifetime { span, ty: self.resolve_vars_if_possible(ty), note }
+                FulfillReqLifetime { span, ty: self.resolve_vars_if_possible(ty), note }
                     .into_diagnostic(&self.tcx.sess.parse_sess.span_diagnostic)
             }
             infer::RelateRegionParamBound(span) => {
@ -91,7 +91,7 @@ lint_ty_qualified = usage of qualified `ty::{$ty}`
|
||||||
lint_lintpass_by_hand = implementing `LintPass` by hand
|
lint_lintpass_by_hand = implementing `LintPass` by hand
|
||||||
.help = try using `declare_lint_pass!` or `impl_lint_pass!` instead
|
.help = try using `declare_lint_pass!` or `impl_lint_pass!` instead
|
||||||
|
|
||||||
lint_non_existant_doc_keyword = found non-existing keyword `{$keyword}` used in `#[doc(keyword = "...")]`
|
lint_non_existent_doc_keyword = found non-existing keyword `{$keyword}` used in `#[doc(keyword = "...")]`
|
||||||
.help = only existing keywords are allowed in core/std
|
.help = only existing keywords are allowed in core/std
|
||||||
|
|
||||||
lint_diag_out_of_impl =
|
lint_diag_out_of_impl =
|
||||||
|
@ -107,7 +107,7 @@ lint_cstring_ptr = getting the inner pointer of a temporary `CString`
|
||||||
.note = pointers do not have a lifetime; when calling `as_ptr` the `CString` will be deallocated at the end of the statement because nothing is referencing it as far as the type system is concerned
|
.note = pointers do not have a lifetime; when calling `as_ptr` the `CString` will be deallocated at the end of the statement because nothing is referencing it as far as the type system is concerned
|
||||||
.help = for more information, see https://doc.rust-lang.org/reference/destructors.html
|
.help = for more information, see https://doc.rust-lang.org/reference/destructors.html
|
||||||
|
|
||||||
lint_multple_supertrait_upcastable = `{$ident}` is object-safe and has multiple supertraits
|
lint_multiple_supertrait_upcastable = `{$ident}` is object-safe and has multiple supertraits
|
||||||
|
|
||||||
lint_identifier_non_ascii_char = identifier contains non-ASCII characters
|
lint_identifier_non_ascii_char = identifier contains non-ASCII characters
|
||||||
|
|
||||||
|
|
|
@ -2,7 +2,7 @@
|
||||||
//! Clippy.
|
//! Clippy.
|
||||||
|
|
||||||
use crate::lints::{
|
use crate::lints::{
|
||||||
BadOptAccessDiag, DefaultHashTypesDiag, DiagOutOfImpl, LintPassByHand, NonExistantDocKeyword,
|
BadOptAccessDiag, DefaultHashTypesDiag, DiagOutOfImpl, LintPassByHand, NonExistentDocKeyword,
|
||||||
QueryInstability, TyQualified, TykindDiag, TykindKind, UntranslatableDiag,
|
QueryInstability, TyQualified, TykindDiag, TykindKind, UntranslatableDiag,
|
||||||
};
|
};
|
||||||
use crate::{EarlyContext, EarlyLintPass, LateContext, LateLintPass, LintContext};
|
use crate::{EarlyContext, EarlyLintPass, LateContext, LateLintPass, LintContext};
|
||||||
|
@ -334,7 +334,7 @@ impl<'tcx> LateLintPass<'tcx> for ExistingDocKeyword {
|
||||||
cx.emit_spanned_lint(
|
cx.emit_spanned_lint(
|
||||||
EXISTING_DOC_KEYWORD,
|
EXISTING_DOC_KEYWORD,
|
||||||
attr.span,
|
attr.span,
|
||||||
NonExistantDocKeyword { keyword },
|
NonExistentDocKeyword { keyword },
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -424,7 +424,7 @@ impl LateLintPass<'_> for Diagnostics {
|
||||||
}
|
}
|
||||||
|
|
||||||
declare_tool_lint! {
|
declare_tool_lint! {
|
||||||
/// The `bad_opt_access` lint detects accessing options by field instad of
|
/// The `bad_opt_access` lint detects accessing options by field instead of
|
||||||
/// the wrapper function.
|
/// the wrapper function.
|
||||||
pub rustc::BAD_OPT_ACCESS,
|
pub rustc::BAD_OPT_ACCESS,
|
||||||
Deny,
|
Deny,
|
||||||
|
|
|
@ -25,7 +25,7 @@ declare_lint! {
|
||||||
///
|
///
|
||||||
/// fn main() {
|
/// fn main() {
|
||||||
/// #[warn(let_underscore_drop)]
|
/// #[warn(let_underscore_drop)]
|
||||||
/// // SomeStuct is dropped immediately instead of at end of scope,
|
/// // SomeStruct is dropped immediately instead of at end of scope,
|
||||||
/// // so "Dropping SomeStruct" is printed before "end of main".
|
/// // so "Dropping SomeStruct" is printed before "end of main".
|
||||||
/// // The order of prints would be reversed if SomeStruct was bound to
|
/// // The order of prints would be reversed if SomeStruct was bound to
|
||||||
/// // a name (such as "_foo").
|
/// // a name (such as "_foo").
|
||||||
|
|
|
@ -3,7 +3,7 @@ use crate::{
|
||||||
fluent_generated as fluent,
|
fluent_generated as fluent,
|
||||||
late::unerased_lint_store,
|
late::unerased_lint_store,
|
||||||
lints::{
|
lints::{
|
||||||
DeprecatedLintName, IgnoredUnlessCrateSpecified, OverruledAtributeLint,
|
DeprecatedLintName, IgnoredUnlessCrateSpecified, OverruledAttributeLint,
|
||||||
RenamedOrRemovedLint, RenamedOrRemovedLintSuggestion, UnknownLint, UnknownLintSuggestion,
|
RenamedOrRemovedLint, RenamedOrRemovedLintSuggestion, UnknownLint, UnknownLintSuggestion,
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
@ -612,7 +612,7 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> {
|
||||||
self.emit_spanned_lint(
|
self.emit_spanned_lint(
|
||||||
FORBIDDEN_LINT_GROUPS,
|
FORBIDDEN_LINT_GROUPS,
|
||||||
src.span().into(),
|
src.span().into(),
|
||||||
OverruledAtributeLint {
|
OverruledAttributeLint {
|
||||||
overruled: src.span(),
|
overruled: src.span(),
|
||||||
lint_level: level.as_str(),
|
lint_level: level.as_str(),
|
||||||
lint_source: src.name(),
|
lint_source: src.name(),
|
||||||
|
|
|
@ -806,9 +806,9 @@ pub struct TyQualified {
|
||||||
pub struct LintPassByHand;
|
pub struct LintPassByHand;
|
||||||
|
|
||||||
#[derive(LintDiagnostic)]
|
#[derive(LintDiagnostic)]
|
||||||
#[diag(lint_non_existant_doc_keyword)]
|
#[diag(lint_non_existent_doc_keyword)]
|
||||||
#[help]
|
#[help]
|
||||||
pub struct NonExistantDocKeyword {
|
pub struct NonExistentDocKeyword {
|
||||||
pub keyword: Symbol,
|
pub keyword: Symbol,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -875,7 +875,7 @@ impl AddToDiagnostic for NonBindingLetSub {
|
||||||
// levels.rs
|
// levels.rs
|
||||||
#[derive(LintDiagnostic)]
|
#[derive(LintDiagnostic)]
|
||||||
#[diag(lint_overruled_attribute)]
|
#[diag(lint_overruled_attribute)]
|
||||||
pub struct OverruledAtributeLint<'a> {
|
pub struct OverruledAttributeLint<'a> {
|
||||||
#[label]
|
#[label]
|
||||||
pub overruled: Span,
|
pub overruled: Span,
|
||||||
pub lint_level: &'a str,
|
pub lint_level: &'a str,
|
||||||
|
@ -947,7 +947,7 @@ pub struct CStringPtr {
|
||||||
|
|
||||||
// multiple_supertrait_upcastable.rs
|
// multiple_supertrait_upcastable.rs
|
||||||
#[derive(LintDiagnostic)]
|
#[derive(LintDiagnostic)]
|
||||||
#[diag(lint_multple_supertrait_upcastable)]
|
#[diag(lint_multiple_supertrait_upcastable)]
|
||||||
pub struct MultipleSupertraitUpcastable {
|
pub struct MultipleSupertraitUpcastable {
|
||||||
pub ident: Ident,
|
pub ident: Ident,
|
||||||
}
|
}
|
||||||
|
@ -1422,7 +1422,7 @@ pub struct UnusedResult<'a> {
|
||||||
pub ty: Ty<'a>,
|
pub ty: Ty<'a>,
|
||||||
}
|
}
|
||||||
|
|
||||||
// FIXME(davidtwco): this isn't properly translatable becauses of the
|
// FIXME(davidtwco): this isn't properly translatable because of the
|
||||||
// pre/post strings
|
// pre/post strings
|
||||||
#[derive(LintDiagnostic)]
|
#[derive(LintDiagnostic)]
|
||||||
#[diag(lint_unused_closure)]
|
#[diag(lint_unused_closure)]
|
||||||
|
@ -1433,7 +1433,7 @@ pub struct UnusedClosure<'a> {
|
||||||
pub post: &'a str,
|
pub post: &'a str,
|
||||||
}
|
}
|
||||||
|
|
||||||
// FIXME(davidtwco): this isn't properly translatable becauses of the
|
// FIXME(davidtwco): this isn't properly translatable because of the
|
||||||
// pre/post strings
|
// pre/post strings
|
||||||
#[derive(LintDiagnostic)]
|
#[derive(LintDiagnostic)]
|
||||||
#[diag(lint_unused_generator)]
|
#[diag(lint_unused_generator)]
|
||||||
|
@ -1444,7 +1444,7 @@ pub struct UnusedGenerator<'a> {
|
||||||
pub post: &'a str,
|
pub post: &'a str,
|
||||||
}
|
}
|
||||||
|
|
||||||
// FIXME(davidtwco): this isn't properly translatable becauses of the pre/post
|
// FIXME(davidtwco): this isn't properly translatable because of the pre/post
|
||||||
// strings
|
// strings
|
||||||
pub struct UnusedDef<'a, 'b> {
|
pub struct UnusedDef<'a, 'b> {
|
||||||
pub pre: &'a str,
|
pub pre: &'a str,
|
||||||
|
|
|
@ -250,7 +250,7 @@ impl EarlyLintPass for NonAsciiIdents {
|
||||||
let latin_augmented_script_set = AugmentedScriptSet::for_char('A');
|
let latin_augmented_script_set = AugmentedScriptSet::for_char('A');
|
||||||
script_states.insert(latin_augmented_script_set, ScriptSetUsage::Verified);
|
script_states.insert(latin_augmented_script_set, ScriptSetUsage::Verified);
|
||||||
|
|
||||||
let mut has_suspicous = false;
|
let mut has_suspicious = false;
|
||||||
for (symbol, &sp) in symbols.iter() {
|
for (symbol, &sp) in symbols.iter() {
|
||||||
let symbol_str = symbol.as_str();
|
let symbol_str = symbol.as_str();
|
||||||
for ch in symbol_str.chars() {
|
for ch in symbol_str.chars() {
|
||||||
|
@ -278,14 +278,14 @@ impl EarlyLintPass for NonAsciiIdents {
|
||||||
if !is_potential_mixed_script_confusable_char(ch) {
|
if !is_potential_mixed_script_confusable_char(ch) {
|
||||||
ScriptSetUsage::Verified
|
ScriptSetUsage::Verified
|
||||||
} else {
|
} else {
|
||||||
has_suspicous = true;
|
has_suspicious = true;
|
||||||
ScriptSetUsage::Suspicious(vec![ch], sp)
|
ScriptSetUsage::Suspicious(vec![ch], sp)
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if has_suspicous {
|
if has_suspicious {
|
||||||
let verified_augmented_script_sets = script_states
|
let verified_augmented_script_sets = script_states
|
||||||
.iter()
|
.iter()
|
||||||
.flat_map(|(k, v)| match v {
|
.flat_map(|(k, v)| match v {
|
||||||
|
|
|
@ -532,7 +532,7 @@ pub enum BuiltinLintDiagnostics {
|
||||||
AmbiguousGlobReexports {
|
AmbiguousGlobReexports {
|
||||||
/// The name for which collision(s) have occurred.
|
/// The name for which collision(s) have occurred.
|
||||||
name: String,
|
name: String,
|
||||||
/// The name space for whihc the collision(s) occurred in.
|
/// The name space for which the collision(s) occurred in.
|
||||||
namespace: String,
|
namespace: String,
|
||||||
/// Span where the name is first re-exported.
|
/// Span where the name is first re-exported.
|
||||||
first_reexport_span: Span,
|
first_reexport_span: Span,
|
||||||
|
|
|
@ -58,7 +58,7 @@ impl<'tcx> UnifyValue for UnifiedRegion<'tcx> {
|
||||||
|
|
||||||
fn unify_values(value1: &Self, value2: &Self) -> Result<Self, NoError> {
|
fn unify_values(value1: &Self, value2: &Self) -> Result<Self, NoError> {
|
||||||
// We pick the value of the least universe because it is compatible with more variables.
|
// We pick the value of the least universe because it is compatible with more variables.
|
||||||
// This is *not* neccessary for soundness, but it allows more region variables to be
|
// This is *not* necessary for soundness, but it allows more region variables to be
|
||||||
// resolved to the said value.
|
// resolved to the said value.
|
||||||
#[cold]
|
#[cold]
|
||||||
fn min_universe<'tcx>(r1: Region<'tcx>, r2: Region<'tcx>) -> Region<'tcx> {
|
fn min_universe<'tcx>(r1: Region<'tcx>, r2: Region<'tcx>) -> Region<'tcx> {
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
/// A macro for triggering an ICE.
|
/// A macro for triggering an ICE.
|
||||||
/// Calling `bug` instead of panicking will result in a nicer error message and should
|
/// Calling `bug` instead of panicking will result in a nicer error message and should
|
||||||
/// therefore be prefered over `panic`/`unreachable` or others.
|
/// therefore be preferred over `panic`/`unreachable` or others.
|
||||||
///
|
///
|
||||||
/// If you have a span available, you should use [`span_bug`] instead.
|
/// If you have a span available, you should use [`span_bug`] instead.
|
||||||
///
|
///
|
||||||
|
|
|
@ -63,7 +63,7 @@ impl InitMask {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Sets a specified range to a value. If the range is out-of-bounds, the mask will grow to
|
/// Sets a specified range to a value. If the range is out-of-bounds, the mask will grow to
|
||||||
/// accomodate it entirely.
|
/// accommodate it entirely.
|
||||||
pub fn set_range(&mut self, range: AllocRange, new_state: bool) {
|
pub fn set_range(&mut self, range: AllocRange, new_state: bool) {
|
||||||
let start = range.start;
|
let start = range.start;
|
||||||
let end = range.end();
|
let end = range.end();
|
||||||
|
|
|
@ -14,7 +14,7 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
|
||||||
#[derive(HashStable)]
|
#[derive(HashStable)]
|
||||||
pub struct ProvenanceMap<Prov = AllocId> {
|
pub struct ProvenanceMap<Prov = AllocId> {
|
||||||
/// Provenance in this map applies from the given offset for an entire pointer-size worth of
|
/// Provenance in this map applies from the given offset for an entire pointer-size worth of
|
||||||
/// bytes. Two entires in this map are always at least a pointer size apart.
|
/// bytes. Two entries in this map are always at least a pointer size apart.
|
||||||
ptrs: SortedMap<Size, Prov>,
|
ptrs: SortedMap<Size, Prov>,
|
||||||
/// Provenance in this map only applies to the given single byte.
|
/// Provenance in this map only applies to the given single byte.
|
||||||
/// This map is disjoint from the previous. It will always be empty when
|
/// This map is disjoint from the previous. It will always be empty when
|
||||||
|
|
|
@ -37,7 +37,7 @@ pub fn erase<T: EraseType>(src: T) -> Erase<T> {
|
||||||
#[inline(always)]
|
#[inline(always)]
|
||||||
pub fn restore<T: EraseType>(value: Erase<T>) -> T {
|
pub fn restore<T: EraseType>(value: Erase<T>) -> T {
|
||||||
let value: Erased<<T as EraseType>::Result> = value;
|
let value: Erased<<T as EraseType>::Result> = value;
|
||||||
// SAFETY: Due to the use of impl Trait in `Erase` the only way to safetly create an instance
|
// SAFETY: Due to the use of impl Trait in `Erase` the only way to safely create an instance
|
||||||
// of `Erase` is to call `erase`, so we know that `value.data` is a valid instance of `T` of
|
// of `Erase` is to call `erase`, so we know that `value.data` is a valid instance of `T` of
|
||||||
// the right size.
|
// the right size.
|
||||||
unsafe { transmute_copy(&value.data) }
|
unsafe { transmute_copy(&value.data) }
|
||||||
|
|
|
@ -26,7 +26,7 @@ pub trait Key: Sized {
|
||||||
//
|
//
|
||||||
// ...But r-a doesn't support them yet and using a default here causes r-a to not infer
|
// ...But r-a doesn't support them yet and using a default here causes r-a to not infer
|
||||||
// return types of queries which is very annoying. Thus, until r-a support associated
|
// return types of queries which is very annoying. Thus, until r-a support associated
|
||||||
// type defaults, plese restrain from using them here <3
|
// type defaults, please restrain from using them here <3
|
||||||
//
|
//
|
||||||
// r-a issue: <https://github.com/rust-lang/rust-analyzer/issues/13693>
|
// r-a issue: <https://github.com/rust-lang/rust-analyzer/issues/13693>
|
||||||
type CacheSelector;
|
type CacheSelector;
|
||||||
|
|
|
@ -97,7 +97,7 @@ rustc_queries! {
|
||||||
|
|
||||||
/// Gives access to the HIR ID for the given `LocalDefId` owner `key` if any.
|
/// Gives access to the HIR ID for the given `LocalDefId` owner `key` if any.
|
||||||
///
|
///
|
||||||
/// Definitions that were generated with no HIR, would be feeded to return `None`.
|
/// Definitions that were generated with no HIR, would be fed to return `None`.
|
||||||
query opt_local_def_id_to_hir_id(key: LocalDefId) -> Option<hir::HirId>{
|
query opt_local_def_id_to_hir_id(key: LocalDefId) -> Option<hir::HirId>{
|
||||||
desc { |tcx| "getting HIR ID of `{}`", tcx.def_path_str(key.to_def_id()) }
|
desc { |tcx| "getting HIR ID of `{}`", tcx.def_path_str(key.to_def_id()) }
|
||||||
feedable
|
feedable
|
||||||
|
|
|
@ -1347,7 +1347,7 @@ pub trait PrettyPrinter<'tcx>:
|
||||||
p!(write("{}::{}", self.tcx().crate_name(def.did.krate), self.tcx().def_path(def.did).to_string_no_crate_verbose()))
|
p!(write("{}::{}", self.tcx().crate_name(def.did.krate), self.tcx().def_path(def.did).to_string_no_crate_verbose()))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
defkind => bug!("`{:?}` has unexpcted defkind {:?}", ct, defkind),
|
defkind => bug!("`{:?}` has unexpected defkind {:?}", ct, defkind),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
ty::ConstKind::Infer(infer_ct) => {
|
ty::ConstKind::Infer(infer_ct) => {
|
||||||
|
|
|
@ -1621,19 +1621,24 @@ impl<'tcx> Region<'tcx> {
|
||||||
|
|
||||||
pub fn get_name(self) -> Option<Symbol> {
|
pub fn get_name(self) -> Option<Symbol> {
|
||||||
if self.has_name() {
|
if self.has_name() {
|
||||||
let name = match *self {
|
match *self {
|
||||||
ty::ReEarlyBound(ebr) => Some(ebr.name),
|
ty::ReEarlyBound(ebr) => Some(ebr.name),
|
||||||
ty::ReLateBound(_, br) => br.kind.get_name(),
|
ty::ReLateBound(_, br) => br.kind.get_name(),
|
||||||
ty::ReFree(fr) => fr.bound_region.get_name(),
|
ty::ReFree(fr) => fr.bound_region.get_name(),
|
||||||
ty::ReStatic => Some(kw::StaticLifetime),
|
ty::ReStatic => Some(kw::StaticLifetime),
|
||||||
ty::RePlaceholder(placeholder) => placeholder.bound.kind.get_name(),
|
ty::RePlaceholder(placeholder) => placeholder.bound.kind.get_name(),
|
||||||
_ => None,
|
_ => None,
|
||||||
};
|
}
|
||||||
|
} else {
|
||||||
return name;
|
None
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
None
|
pub fn get_name_or_anon(self) -> Symbol {
|
||||||
|
match self.get_name() {
|
||||||
|
Some(name) => name,
|
||||||
|
None => sym::anon,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Is this region named by the user?
|
/// Is this region named by the user?
|
||||||
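
The hunk above rewrites `get_name` to return directly out of the match instead of collecting into a local and returning it afterwards, and adds `get_name_or_anon` as an infallible companion. The following is only a minimal standalone sketch of that shape, with stand-in types rather than rustc's actual `Region`:

```rust
#[derive(Clone, Copy)]
enum Region {
    Named(&'static str),
    Static,
    Anonymous,
}

impl Region {
    fn has_name(self) -> bool {
        !matches!(self, Region::Anonymous)
    }

    /// Returns directly out of the match, as in the rewritten method above.
    fn get_name(self) -> Option<&'static str> {
        if self.has_name() {
            match self {
                Region::Named(name) => Some(name),
                Region::Static => Some("'static"),
                Region::Anonymous => None,
            }
        } else {
            None
        }
    }

    /// The new infallible variant: fall back to an "anonymous" name.
    fn get_name_or_anon(self) -> &'static str {
        match self.get_name() {
            Some(name) => name,
            None => "'anon",
        }
    }
}

fn main() {
    assert_eq!(Region::Named("'a").get_name_or_anon(), "'a");
    assert_eq!(Region::Static.get_name(), Some("'static"));
    assert_eq!(Region::Anonymous.get_name_or_anon(), "'anon");
}
```

In the real method the fallback is the `anon` symbol, which is added to the `symbols!` table later in this diff.
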
|
|
|
@ -593,7 +593,7 @@ pub struct MultipleMutBorrows {
|
||||||
#[primary_span]
|
#[primary_span]
|
||||||
pub span: Span,
|
pub span: Span,
|
||||||
#[subdiagnostic]
|
#[subdiagnostic]
|
||||||
pub occurences: Vec<Conflict>,
|
pub occurrences: Vec<Conflict>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Diagnostic)]
|
#[derive(Diagnostic)]
|
||||||
|
@ -602,7 +602,7 @@ pub struct AlreadyBorrowed {
|
||||||
#[primary_span]
|
#[primary_span]
|
||||||
pub span: Span,
|
pub span: Span,
|
||||||
#[subdiagnostic]
|
#[subdiagnostic]
|
||||||
pub occurences: Vec<Conflict>,
|
pub occurrences: Vec<Conflict>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Diagnostic)]
|
#[derive(Diagnostic)]
|
||||||
|
@ -611,7 +611,7 @@ pub struct AlreadyMutBorrowed {
|
||||||
#[primary_span]
|
#[primary_span]
|
||||||
pub span: Span,
|
pub span: Span,
|
||||||
#[subdiagnostic]
|
#[subdiagnostic]
|
||||||
pub occurences: Vec<Conflict>,
|
pub occurrences: Vec<Conflict>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Diagnostic)]
|
#[derive(Diagnostic)]
|
||||||
|
@ -620,7 +620,7 @@ pub struct MovedWhileBorrowed {
|
||||||
#[primary_span]
|
#[primary_span]
|
||||||
pub span: Span,
|
pub span: Span,
|
||||||
#[subdiagnostic]
|
#[subdiagnostic]
|
||||||
pub occurences: Vec<Conflict>,
|
pub occurrences: Vec<Conflict>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Subdiagnostic)]
|
#[derive(Subdiagnostic)]
|
||||||
|
|
|
@ -966,30 +966,30 @@ fn check_borrow_conflicts_in_at_patterns<'tcx>(cx: &MatchVisitor<'_, '_, 'tcx>,
|
||||||
let report_mut_ref = !conflicts_mut_ref.is_empty();
|
let report_mut_ref = !conflicts_mut_ref.is_empty();
|
||||||
let report_move_conflict = !conflicts_move.is_empty();
|
let report_move_conflict = !conflicts_move.is_empty();
|
||||||
|
|
||||||
let mut occurences = match mut_outer {
|
let mut occurrences = match mut_outer {
|
||||||
Mutability::Mut => vec![Conflict::Mut { span: pat.span, name }],
|
Mutability::Mut => vec![Conflict::Mut { span: pat.span, name }],
|
||||||
Mutability::Not => vec![Conflict::Ref { span: pat.span, name }],
|
Mutability::Not => vec![Conflict::Ref { span: pat.span, name }],
|
||||||
};
|
};
|
||||||
occurences.extend(conflicts_mut_mut);
|
occurrences.extend(conflicts_mut_mut);
|
||||||
occurences.extend(conflicts_mut_ref);
|
occurrences.extend(conflicts_mut_ref);
|
||||||
occurences.extend(conflicts_move);
|
occurrences.extend(conflicts_move);
|
||||||
|
|
||||||
// Report errors if any.
|
// Report errors if any.
|
||||||
if report_mut_mut {
|
if report_mut_mut {
|
||||||
// Report mutability conflicts for e.g. `ref mut x @ Some(ref mut y)`.
|
// Report mutability conflicts for e.g. `ref mut x @ Some(ref mut y)`.
|
||||||
sess.emit_err(MultipleMutBorrows { span: pat.span, occurences });
|
sess.emit_err(MultipleMutBorrows { span: pat.span, occurrences });
|
||||||
} else if report_mut_ref {
|
} else if report_mut_ref {
|
||||||
// Report mutability conflicts for e.g. `ref x @ Some(ref mut y)` or the converse.
|
// Report mutability conflicts for e.g. `ref x @ Some(ref mut y)` or the converse.
|
||||||
match mut_outer {
|
match mut_outer {
|
||||||
Mutability::Mut => {
|
Mutability::Mut => {
|
||||||
sess.emit_err(AlreadyMutBorrowed { span: pat.span, occurences });
|
sess.emit_err(AlreadyMutBorrowed { span: pat.span, occurrences });
|
||||||
}
|
}
|
||||||
Mutability::Not => {
|
Mutability::Not => {
|
||||||
sess.emit_err(AlreadyBorrowed { span: pat.span, occurences });
|
sess.emit_err(AlreadyBorrowed { span: pat.span, occurrences });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
} else if report_move_conflict {
|
} else if report_move_conflict {
|
||||||
// Report by-ref and by-move conflicts, e.g. `ref x @ y`.
|
// Report by-ref and by-move conflicts, e.g. `ref x @ y`.
|
||||||
sess.emit_err(MovedWhileBorrowed { span: pat.span, occurences });
|
sess.emit_err(MovedWhileBorrowed { span: pat.span, occurrences });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -394,8 +394,8 @@ where
|
||||||
) -> io::Result<()> {
|
) -> io::Result<()> {
|
||||||
let diffs = StateDiffCollector::run(body, block, self.results.results(), self.style);
|
let diffs = StateDiffCollector::run(body, block, self.results.results(), self.style);
|
||||||
|
|
||||||
let mut befores = diffs.before.map(|v| v.into_iter());
|
let mut diffs_before = diffs.before.map(|v| v.into_iter());
|
||||||
let mut afters = diffs.after.into_iter();
|
let mut diffs_after = diffs.after.into_iter();
|
||||||
|
|
||||||
let next_in_dataflow_order = |it: &mut std::vec::IntoIter<_>| {
|
let next_in_dataflow_order = |it: &mut std::vec::IntoIter<_>| {
|
||||||
if A::Direction::IS_FORWARD { it.next().unwrap() } else { it.next_back().unwrap() }
|
if A::Direction::IS_FORWARD { it.next().unwrap() } else { it.next_back().unwrap() }
|
||||||
|
@ -405,8 +405,8 @@ where
|
||||||
let statement_str = format!("{statement:?}");
|
let statement_str = format!("{statement:?}");
|
||||||
let index_str = format!("{i}");
|
let index_str = format!("{i}");
|
||||||
|
|
||||||
let after = next_in_dataflow_order(&mut afters);
|
let after = next_in_dataflow_order(&mut diffs_after);
|
||||||
let before = befores.as_mut().map(next_in_dataflow_order);
|
let before = diffs_before.as_mut().map(next_in_dataflow_order);
|
||||||
|
|
||||||
self.write_row(w, &index_str, &statement_str, |_this, w, fmt| {
|
self.write_row(w, &index_str, &statement_str, |_this, w, fmt| {
|
||||||
if let Some(before) = before {
|
if let Some(before) = before {
|
||||||
|
@ -417,11 +417,11 @@ where
|
||||||
})?;
|
})?;
|
||||||
}
|
}
|
||||||
|
|
||||||
let after = next_in_dataflow_order(&mut afters);
|
let after = next_in_dataflow_order(&mut diffs_after);
|
||||||
let before = befores.as_mut().map(next_in_dataflow_order);
|
let before = diffs_before.as_mut().map(next_in_dataflow_order);
|
||||||
|
|
||||||
assert!(afters.is_empty());
|
assert!(diffs_after.is_empty());
|
||||||
assert!(befores.as_ref().map_or(true, ExactSizeIterator::is_empty));
|
assert!(diffs_before.as_ref().map_or(true, ExactSizeIterator::is_empty));
|
||||||
|
|
||||||
let terminator = body[block].terminator();
|
let terminator = body[block].terminator();
|
||||||
let mut terminator_str = String::new();
|
let mut terminator_str = String::new();
|
||||||
|
|
|
@ -83,7 +83,7 @@
|
||||||
//! that ever have their address taken. Of course that requires actually having alias analysis
|
//! that ever have their address taken. Of course that requires actually having alias analysis
|
||||||
//! (and a model to build it on), so this might be a bit of a ways off.
|
//! (and a model to build it on), so this might be a bit of a ways off.
|
||||||
//!
|
//!
|
||||||
//! * Various perf improvents. There are a bunch of comments in here marked `PERF` with ideas for
|
//! * Various perf improvements. There are a bunch of comments in here marked `PERF` with ideas for
|
||||||
//! how to do things more efficiently. However, the complexity of the pass as a whole should be
|
//! how to do things more efficiently. However, the complexity of the pass as a whole should be
|
||||||
//! kept in mind.
|
//! kept in mind.
|
||||||
//!
|
//!
|
||||||
|
|
|
@ -99,7 +99,7 @@ where
|
||||||
//
|
//
|
||||||
// This generates a `switchInt() -> [0: 0, 1: 1, otherwise: unreachable]`, which allows us or LLVM to
|
// This generates a `switchInt() -> [0: 0, 1: 1, otherwise: unreachable]`, which allows us or LLVM to
|
||||||
// turn it into just `x` later. Without the unreachable, such a transformation would be illegal.
|
// turn it into just `x` later. Without the unreachable, such a transformation would be illegal.
|
||||||
// If the otherwise branch is unreachable, we can delete all other unreacahble targets, as they will
|
// If the otherwise branch is unreachable, we can delete all other unreachable targets, as they will
|
||||||
// still point to the unreachable and therefore not lose reachability information.
|
// still point to the unreachable and therefore not lose reachability information.
|
||||||
let reachable_iter = targets.iter().filter(|(_, bb)| !is_unreachable(*bb));
|
let reachable_iter = targets.iter().filter(|(_, bb)| !is_unreachable(*bb));
|
||||||
|
|
||||||
|
|
|
@ -474,7 +474,7 @@ fn collect_and_partition_mono_items(tcx: TyCtxt<'_>, (): ()) -> (&DefIdSet, &[Co
|
||||||
(tcx.arena.alloc(mono_items), codegen_units)
|
(tcx.arena.alloc(mono_items), codegen_units)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Outputs stats about instantation counts and estimated size, per `MonoItem`'s
|
/// Outputs stats about instantiation counts and estimated size, per `MonoItem`'s
|
||||||
/// def, to a file in the given output directory.
|
/// def, to a file in the given output directory.
|
||||||
fn dump_mono_items_stats<'tcx>(
|
fn dump_mono_items_stats<'tcx>(
|
||||||
tcx: TyCtxt<'tcx>,
|
tcx: TyCtxt<'tcx>,
|
||||||
|
|
|
@ -738,3 +738,7 @@ parse_box_syntax_removed = `box_syntax` has been removed
|
||||||
parse_bad_return_type_notation_output =
|
parse_bad_return_type_notation_output =
|
||||||
return type not allowed with return type notation
|
return type not allowed with return type notation
|
||||||
.suggestion = remove the return type
|
.suggestion = remove the return type
|
||||||
|
|
||||||
|
parse_bad_return_type_notation_dotdot =
|
||||||
|
return type notation uses `()` instead of `(..)` for elided arguments
|
||||||
|
.suggestion = remove the `..`
|
||||||
|
|
|
@ -2324,3 +2324,11 @@ pub(crate) struct BadReturnTypeNotationOutput {
|
||||||
#[suggestion(code = "", applicability = "maybe-incorrect")]
|
#[suggestion(code = "", applicability = "maybe-incorrect")]
|
||||||
pub span: Span,
|
pub span: Span,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Diagnostic)]
|
||||||
|
#[diag(parse_bad_return_type_notation_dotdot)]
|
||||||
|
pub(crate) struct BadReturnTypeNotationDotDot {
|
||||||
|
#[primary_span]
|
||||||
|
#[suggestion(code = "", applicability = "maybe-incorrect")]
|
||||||
|
pub span: Span,
|
||||||
|
}
|
||||||
|
|
|
@ -21,7 +21,7 @@ pub struct TokenTreeDiagInfo {
|
||||||
pub matching_block_spans: Vec<(Span, Span)>,
|
pub matching_block_spans: Vec<(Span, Span)>,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn same_identation_level(sm: &SourceMap, open_sp: Span, close_sp: Span) -> bool {
|
pub fn same_indentation_level(sm: &SourceMap, open_sp: Span, close_sp: Span) -> bool {
|
||||||
match (sm.span_to_margin(open_sp), sm.span_to_margin(close_sp)) {
|
match (sm.span_to_margin(open_sp), sm.span_to_margin(close_sp)) {
|
||||||
(Some(open_padding), Some(close_padding)) => open_padding == close_padding,
|
(Some(open_padding), Some(close_padding)) => open_padding == close_padding,
|
||||||
_ => false,
|
_ => false,
|
||||||
|
@ -67,13 +67,13 @@ pub fn report_suspicious_mismatch_block(
|
||||||
let mut matched_spans: Vec<(Span, bool)> = diag_info
|
let mut matched_spans: Vec<(Span, bool)> = diag_info
|
||||||
.matching_block_spans
|
.matching_block_spans
|
||||||
.iter()
|
.iter()
|
||||||
.map(|&(open, close)| (open.with_hi(close.lo()), same_identation_level(sm, open, close)))
|
.map(|&(open, close)| (open.with_hi(close.lo()), same_indentation_level(sm, open, close)))
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
// sort by `lo`, so the large block spans in the front
|
// sort by `lo`, so the large block spans in the front
|
||||||
matched_spans.sort_by_key(|(span, _)| span.lo());
|
matched_spans.sort_by_key(|(span, _)| span.lo());
|
||||||
|
|
||||||
// We use larger block whose identation is well to cover those inner mismatched blocks
|
// We use larger block whose indentation is well to cover those inner mismatched blocks
|
||||||
// O(N^2) here, but we are on error reporting path, so it is fine
|
// O(N^2) here, but we are on error reporting path, so it is fine
|
||||||
for i in 0..matched_spans.len() {
|
for i in 0..matched_spans.len() {
|
||||||
let (block_span, same_ident) = matched_spans[i];
|
let (block_span, same_ident) = matched_spans[i];
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
use super::diagnostics::report_suspicious_mismatch_block;
|
use super::diagnostics::report_suspicious_mismatch_block;
|
||||||
use super::diagnostics::same_identation_level;
|
use super::diagnostics::same_indentation_level;
|
||||||
use super::diagnostics::TokenTreeDiagInfo;
|
use super::diagnostics::TokenTreeDiagInfo;
|
||||||
use super::{StringReader, UnmatchedDelim};
|
use super::{StringReader, UnmatchedDelim};
|
||||||
use rustc_ast::token::{self, Delimiter, Token};
|
use rustc_ast::token::{self, Delimiter, Token};
|
||||||
|
@ -153,7 +153,7 @@ impl<'a> TokenTreesReader<'a> {
|
||||||
unclosed_delimiter = Some(sp);
|
unclosed_delimiter = Some(sp);
|
||||||
};
|
};
|
||||||
for (brace, brace_span) in &self.diag_info.open_braces {
|
for (brace, brace_span) in &self.diag_info.open_braces {
|
||||||
if same_identation_level(&sm, self.token.span, *brace_span)
|
if same_indentation_level(&sm, self.token.span, *brace_span)
|
||||||
&& brace == &close_delim
|
&& brace == &close_delim
|
||||||
{
|
{
|
||||||
// high likelihood of these two corresponding
|
// high likelihood of these two corresponding
|
||||||
|
|
|
@ -2767,7 +2767,7 @@ impl<'a> Parser<'a> {
|
||||||
(token::DotDotEq, token::Gt)
|
(token::DotDotEq, token::Gt)
|
||||||
) {
|
) {
|
||||||
// `error_inclusive_range_match_arrow` handles cases like `0..=> {}`,
|
// `error_inclusive_range_match_arrow` handles cases like `0..=> {}`,
|
||||||
// so we supress the error here
|
// so we suppress the error here
|
||||||
err.delay_as_bug();
|
err.delay_as_bug();
|
||||||
this.bump();
|
this.bump();
|
||||||
} else {
|
} else {
|
||||||
|
|
|
@ -290,16 +290,17 @@ impl<'a> Parser<'a> {
|
||||||
})?;
|
})?;
|
||||||
let span = lo.to(self.prev_token.span);
|
let span = lo.to(self.prev_token.span);
|
||||||
AngleBracketedArgs { args, span }.into()
|
AngleBracketedArgs { args, span }.into()
|
||||||
} else if self.token.kind == token::OpenDelim(Delimiter::Parenthesis)
|
} else if self.may_recover()
|
||||||
|
&& self.token.kind == token::OpenDelim(Delimiter::Parenthesis)
|
||||||
// FIXME(return_type_notation): Could also recover `...` here.
|
// FIXME(return_type_notation): Could also recover `...` here.
|
||||||
&& self.look_ahead(1, |tok| tok.kind == token::DotDot)
|
&& self.look_ahead(1, |tok| tok.kind == token::DotDot)
|
||||||
{
|
{
|
||||||
let lo = self.token.span;
|
|
||||||
self.bump();
|
self.bump();
|
||||||
|
self.sess
|
||||||
|
.emit_err(errors::BadReturnTypeNotationDotDot { span: self.token.span });
|
||||||
self.bump();
|
self.bump();
|
||||||
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
|
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
|
||||||
let span = lo.to(self.prev_token.span);
|
let span = lo.to(self.prev_token.span);
|
||||||
self.sess.gated_spans.gate(sym::return_type_notation, span);
|
|
||||||
|
|
||||||
if self.eat_noexpect(&token::RArrow) {
|
if self.eat_noexpect(&token::RArrow) {
|
||||||
let lo = self.prev_token.span;
|
let lo = self.prev_token.span;
|
||||||
|
@ -308,7 +309,13 @@ impl<'a> Parser<'a> {
|
||||||
.emit_err(errors::BadReturnTypeNotationOutput { span: lo.to(ty.span) });
|
.emit_err(errors::BadReturnTypeNotationOutput { span: lo.to(ty.span) });
|
||||||
}
|
}
|
||||||
|
|
||||||
P(GenericArgs::ReturnTypeNotation(span))
|
ParenthesizedArgs {
|
||||||
|
span,
|
||||||
|
inputs: ThinVec::new(),
|
||||||
|
inputs_span: span,
|
||||||
|
output: ast::FnRetTy::Default(self.prev_token.span.shrink_to_hi()),
|
||||||
|
}
|
||||||
|
.into()
|
||||||
} else {
|
} else {
|
||||||
// `(T, U) -> R`
|
// `(T, U) -> R`
|
||||||
let (inputs, _) = self.parse_paren_comma_seq(|p| p.parse_ty())?;
|
let (inputs, _) = self.parse_paren_comma_seq(|p| p.parse_ty())?;
|
||||||
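
The parser hunk above recovers the removed `(..)` spelling by emitting `BadReturnTypeNotationDotDot`, consuming the stray `..`, and then continuing as if `()` had been written. The sketch below models that recover-and-continue pattern with a hypothetical token and parser type; it is not the real `rustc_parse` API:

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
enum Token {
    OpenParen,
    DotDot,
    CloseParen,
    Eof,
}

struct Parser {
    tokens: Vec<Token>,
    pos: usize,
    errors: Vec<String>,
}

impl Parser {
    fn token(&self) -> Token {
        *self.tokens.get(self.pos).unwrap_or(&Token::Eof)
    }

    fn bump(&mut self) {
        self.pos += 1;
    }

    /// Parse the parenthesized part of `method()`, recovering from the old `(..)` form.
    fn parse_rtn_parens(&mut self) -> Result<(), String> {
        assert_eq!(self.token(), Token::OpenParen);
        self.bump();
        if self.token() == Token::DotDot {
            // Recoverable: report it (suggestion: drop the `..`) and keep parsing.
            self.errors
                .push("return type notation uses `()` instead of `(..)`".to_string());
            self.bump();
        }
        if self.token() == Token::CloseParen {
            self.bump();
            Ok(())
        } else {
            Err("expected `)`".to_string())
        }
    }
}

fn main() {
    let mut p = Parser {
        tokens: vec![Token::OpenParen, Token::DotDot, Token::CloseParen],
        pos: 0,
        errors: Vec::new(),
    };
    assert!(p.parse_rtn_parens().is_ok());
    assert_eq!(p.errors.len(), 1);
}
```

As the hunk shows, the real parser additionally guards this branch behind `self.may_recover()`, so the recovery only runs where error recovery is permitted.
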
|
@ -566,13 +573,13 @@ impl<'a> Parser<'a> {
|
||||||
};
|
};
|
||||||
|
|
||||||
let span = lo.to(self.prev_token.span);
|
let span = lo.to(self.prev_token.span);
|
||||||
|
|
||||||
// Gate associated type bounds, e.g., `Iterator<Item: Ord>`.
|
// Gate associated type bounds, e.g., `Iterator<Item: Ord>`.
|
||||||
if let AssocConstraintKind::Bound { .. } = kind {
|
if let AssocConstraintKind::Bound { .. } = kind {
|
||||||
if gen_args.as_ref().map_or(false, |args| {
|
if let Some(ast::GenericArgs::Parenthesized(args)) = &gen_args
|
||||||
matches!(args, GenericArgs::ReturnTypeNotation(..))
|
&& args.inputs.is_empty()
|
||||||
}) {
|
&& matches!(args.output, ast::FnRetTy::Default(..))
|
||||||
// This is already gated in `parse_path_segment`
|
{
|
||||||
|
self.sess.gated_spans.gate(sym::return_type_notation, span);
|
||||||
} else {
|
} else {
|
||||||
self.sess.gated_spans.gate(sym::associated_type_bounds, span);
|
self.sess.gated_spans.gate(sym::associated_type_bounds, span);
|
||||||
}
|
}
|
||||||
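
With the dedicated `GenericArgs::ReturnTypeNotation` variant removed, return-type notation is now encoded as a `Parenthesized` segment with no inputs and a defaulted output, and the gating hunk above detects exactly that shape. A simplified sketch of that check, using stand-in types rather than the real `rustc_ast` definitions:

```rust
// Simplified stand-ins for the AST types; the real definitions live in `rustc_ast`.
#[allow(dead_code)]
enum FnRetTy {
    Default,
    Ty(String),
}

struct ParenthesizedArgs {
    inputs: Vec<String>,
    output: FnRetTy,
}

#[allow(dead_code)]
enum GenericArgs {
    AngleBracketed(Vec<String>),
    Parenthesized(ParenthesizedArgs),
}

/// Mirrors the new gating check: no inputs plus a defaulted output means the
/// segment was written as `method()` (return-type notation), so it is gated
/// under `return_type_notation` rather than `associated_type_bounds`.
fn is_return_type_notation(args: &GenericArgs) -> bool {
    matches!(
        args,
        GenericArgs::Parenthesized(ParenthesizedArgs { inputs, output: FnRetTy::Default })
            if inputs.is_empty()
    )
}

fn main() {
    let rtn = GenericArgs::Parenthesized(ParenthesizedArgs {
        inputs: Vec::new(),
        output: FnRetTy::Default,
    });
    let fn_sugar = GenericArgs::Parenthesized(ParenthesizedArgs {
        inputs: vec!["A".into(), "B".into()],
        output: FnRetTy::Ty("C".into()),
    });
    assert!(is_return_type_notation(&rtn));
    assert!(!is_return_type_notation(&fn_sugar));
}
```

Dropping the dedicated variant is also what lets the stat collector and resolver hunks later in this diff delete their `ReturnTypeNotation` match arms.
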
|
|
|
@ -909,7 +909,7 @@ fn find_width_map_from_snippet(
|
||||||
// Strip quotes.
|
// Strip quotes.
|
||||||
let snippet = &snippet[1..snippet.len() - 1];
|
let snippet = &snippet[1..snippet.len() - 1];
|
||||||
|
|
||||||
// Macros like `println` add a newline at the end. That technically doens't make them "literals" anymore, but it's fine
|
// Macros like `println` add a newline at the end. That technically doesn't make them "literals" anymore, but it's fine
|
||||||
// since we will never need to point our spans there, so we lie about it here by ignoring it.
|
// since we will never need to point our spans there, so we lie about it here by ignoring it.
|
||||||
// Since there might actually be newlines in the source code, we need to normalize away all trailing newlines.
|
// Since there might actually be newlines in the source code, we need to normalize away all trailing newlines.
|
||||||
// If we only trimmed it off the input, `format!("\n")` would cause a mismatch as here we they actually match up.
|
// If we only trimmed it off the input, `format!("\n")` would cause a mismatch as here we they actually match up.
|
||||||
|
|
|
@ -666,7 +666,7 @@ impl<'v> ast_visit::Visitor<'v> for StatCollector<'v> {
|
||||||
fn visit_generic_args(&mut self, g: &'v ast::GenericArgs) {
|
fn visit_generic_args(&mut self, g: &'v ast::GenericArgs) {
|
||||||
record_variants!(
|
record_variants!(
|
||||||
(self, g, g, Id::None, ast, GenericArgs, GenericArgs),
|
(self, g, g, Id::None, ast, GenericArgs, GenericArgs),
|
||||||
[AngleBracketed, Parenthesized, ReturnTypeNotation]
|
[AngleBracketed, Parenthesized]
|
||||||
);
|
);
|
||||||
ast_visit::walk_generic_args(self, g)
|
ast_visit::walk_generic_args(self, g)
|
||||||
}
|
}
|
||||||
|
|
|
@ -42,7 +42,7 @@ resolve_try_adding_local_generic_param_on_method =
|
||||||
try adding a local generic parameter in this method instead
|
try adding a local generic parameter in this method instead
|
||||||
|
|
||||||
resolve_help_try_using_local_generic_param =
|
resolve_help_try_using_local_generic_param =
|
||||||
try using a local generic paramter instead
|
try using a local generic parameter instead
|
||||||
|
|
||||||
resolve_name_is_already_used_as_generic_parameter =
|
resolve_name_is_already_used_as_generic_parameter =
|
||||||
the name `{$name}` is already used for a generic parameter in this item's generic parameters
|
the name `{$name}` is already used for a generic parameter in this item's generic parameters
|
||||||
|
|
|
@ -598,7 +598,7 @@ struct LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
|
||||||
/// The current set of local scopes for types and values.
|
/// The current set of local scopes for types and values.
|
||||||
ribs: PerNS<Vec<Rib<'a>>>,
|
ribs: PerNS<Vec<Rib<'a>>>,
|
||||||
|
|
||||||
/// Previous poped `rib`, only used for diagnostic.
|
/// Previous popped `rib`, only used for diagnostic.
|
||||||
last_block_rib: Option<Rib<'a>>,
|
last_block_rib: Option<Rib<'a>>,
|
||||||
|
|
||||||
/// The current set of local scopes, for labels.
|
/// The current set of local scopes, for labels.
|
||||||
|
@ -1116,7 +1116,6 @@ impl<'a: 'ast, 'ast, 'tcx> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
GenericArgs::ReturnTypeNotation(_span) => {}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -312,7 +312,6 @@ impl<'a> From<&'a ast::PathSegment> for Segment {
|
||||||
(args.span, found_lifetimes)
|
(args.span, found_lifetimes)
|
||||||
}
|
}
|
||||||
GenericArgs::Parenthesized(args) => (args.span, true),
|
GenericArgs::Parenthesized(args) => (args.span, true),
|
||||||
GenericArgs::ReturnTypeNotation(span) => (*span, false),
|
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
(DUMMY_SP, false)
|
(DUMMY_SP, false)
|
||||||
|
@ -1652,7 +1651,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||||
misc2: AmbiguityErrorMisc::None,
|
misc2: AmbiguityErrorMisc::None,
|
||||||
};
|
};
|
||||||
if !self.matches_previous_ambiguity_error(&ambiguity_error) {
|
if !self.matches_previous_ambiguity_error(&ambiguity_error) {
|
||||||
// avoid dumplicated span information to be emitt out
|
// avoid duplicated span information to be emitt out
|
||||||
self.ambiguity_errors.push(ambiguity_error);
|
self.ambiguity_errors.push(ambiguity_error);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -219,7 +219,7 @@ fn find_best_match_for_name_impl(
|
||||||
}
|
}
|
||||||
|
|
||||||
// We have a tie among several candidates, try to select the best among them ignoring substrings.
|
// We have a tie among several candidates, try to select the best among them ignoring substrings.
|
||||||
// For example, the candidates list `force_capture`, `capture`, and user inputed `forced_capture`,
|
// For example, the candidates list `force_capture`, `capture`, and user inputted `forced_capture`,
|
||||||
// we select `force_capture` with a extra round of edit distance calculation.
|
// we select `force_capture` with a extra round of edit distance calculation.
|
||||||
if next_candidates.len() > 1 {
|
if next_candidates.len() > 1 {
|
||||||
debug_assert!(use_substring_score);
|
debug_assert!(use_substring_score);
|
||||||
|
|
|
@ -1207,7 +1207,7 @@ impl HygieneEncodeContext {
|
||||||
// a `SyntaxContext` that we haven't seen before
|
// a `SyntaxContext` that we haven't seen before
|
||||||
while !self.latest_ctxts.lock().is_empty() || !self.latest_expns.lock().is_empty() {
|
while !self.latest_ctxts.lock().is_empty() || !self.latest_expns.lock().is_empty() {
|
||||||
debug!(
|
debug!(
|
||||||
"encode_hygiene: Serializing a round of {:?} SyntaxContextDatas: {:?}",
|
"encode_hygiene: Serializing a round of {:?} SyntaxContextData: {:?}",
|
||||||
self.latest_ctxts.lock().len(),
|
self.latest_ctxts.lock().len(),
|
||||||
self.latest_ctxts
|
self.latest_ctxts
|
||||||
);
|
);
|
||||||
|
|
|
@ -357,6 +357,7 @@ symbols! {
|
||||||
always,
|
always,
|
||||||
and,
|
and,
|
||||||
and_then,
|
and_then,
|
||||||
|
anon,
|
||||||
anonymous_lifetime_in_impl_trait,
|
anonymous_lifetime_in_impl_trait,
|
||||||
any,
|
any,
|
||||||
append_const_msg,
|
append_const_msg,
|
||||||
|
@ -800,6 +801,7 @@ symbols! {
|
||||||
ignore,
|
ignore,
|
||||||
impl_header_lifetime_elision,
|
impl_header_lifetime_elision,
|
||||||
impl_lint_pass,
|
impl_lint_pass,
|
||||||
|
impl_trait_in_assoc_type,
|
||||||
impl_trait_in_bindings,
|
impl_trait_in_bindings,
|
||||||
impl_trait_in_fn_trait_return,
|
impl_trait_in_fn_trait_return,
|
||||||
impl_trait_projections,
|
impl_trait_projections,
|
||||||
|
|
|
@ -755,7 +755,7 @@ impl FromStr for Conv {
|
||||||
"AmdGpuKernel" => Ok(Conv::AmdGpuKernel),
|
"AmdGpuKernel" => Ok(Conv::AmdGpuKernel),
|
||||||
"AvrInterrupt" => Ok(Conv::AvrInterrupt),
|
"AvrInterrupt" => Ok(Conv::AvrInterrupt),
|
||||||
"AvrNonBlockingInterrupt" => Ok(Conv::AvrNonBlockingInterrupt),
|
"AvrNonBlockingInterrupt" => Ok(Conv::AvrNonBlockingInterrupt),
|
||||||
_ => Err(format!("'{s}' is not a valid value for entry function call convetion.")),
|
_ => Err(format!("'{s}' is not a valid value for entry function call convention.")),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -123,7 +123,7 @@ pub enum Lld {
|
||||||
/// target properties, in accordance with the first design goal.
|
/// target properties, in accordance with the first design goal.
|
||||||
///
|
///
|
||||||
/// The first component of the flavor is tightly coupled with the compilation target,
|
/// The first component of the flavor is tightly coupled with the compilation target,
|
||||||
/// while the `Cc` and `Lld` flags can vary withing the same target.
|
/// while the `Cc` and `Lld` flags can vary within the same target.
|
||||||
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)]
|
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)]
|
||||||
pub enum LinkerFlavor {
|
pub enum LinkerFlavor {
|
||||||
/// Unix-like linker with GNU extensions (both naked and compiler-wrapped forms).
|
/// Unix-like linker with GNU extensions (both naked and compiler-wrapped forms).
|
||||||
|
|
|
@ -13,7 +13,7 @@ use rustc_middle::ty::TypeVisitableExt;
|
||||||
use rustc_middle::ty::{self, Ty};
|
use rustc_middle::ty::{self, Ty};
|
||||||
use rustc_middle::ty::{TypeFoldable, TypeFolder, TypeSuperFoldable};
|
use rustc_middle::ty::{TypeFoldable, TypeFolder, TypeSuperFoldable};
|
||||||
|
|
||||||
/// Whether we're canonicalizing a query input or the query reponse.
|
/// Whether we're canonicalizing a query input or the query response.
|
||||||
///
|
///
|
||||||
/// When canonicalizing an input we're in the context of the caller
|
/// When canonicalizing an input we're in the context of the caller
|
||||||
/// while canonicalizing the response happens in the context of the
|
/// while canonicalizing the response happens in the context of the
|
||||||
|
@ -21,7 +21,7 @@ use rustc_middle::ty::{TypeFoldable, TypeFolder, TypeSuperFoldable};
|
||||||
#[derive(Debug, Clone, Copy)]
|
#[derive(Debug, Clone, Copy)]
|
||||||
pub enum CanonicalizeMode {
|
pub enum CanonicalizeMode {
|
||||||
Input,
|
Input,
|
||||||
/// FIXME: We currently return region constraints refering to
|
/// FIXME: We currently return region constraints referring to
|
||||||
/// placeholders and inference variables from a binder instantiated
|
/// placeholders and inference variables from a binder instantiated
|
||||||
/// inside of the query.
|
/// inside of the query.
|
||||||
///
|
///
|
||||||
|
|
|
@ -42,7 +42,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
|
||||||
///
|
///
|
||||||
/// - `var_values`: a map from bound variables in the canonical goal to
|
/// - `var_values`: a map from bound variables in the canonical goal to
|
||||||
/// the values inferred while solving the instantiated goal.
|
/// the values inferred while solving the instantiated goal.
|
||||||
/// - `external_constraints`: additional constraints which aren't expressable
|
/// - `external_constraints`: additional constraints which aren't expressible
|
||||||
/// using simple unification of inference variables.
|
/// using simple unification of inference variables.
|
||||||
#[instrument(level = "debug", skip(self))]
|
#[instrument(level = "debug", skip(self))]
|
||||||
pub(in crate::solve) fn evaluate_added_goals_and_make_canonical_response(
|
pub(in crate::solve) fn evaluate_added_goals_and_make_canonical_response(
|
||||||
|
@ -113,7 +113,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// This returns the substitutions to instantiate the bound variables of
|
/// This returns the substitutions to instantiate the bound variables of
|
||||||
/// the canonical reponse. This depends on the `original_values` for the
|
/// the canonical response. This depends on the `original_values` for the
|
||||||
/// bound variables.
|
/// bound variables.
|
||||||
fn compute_query_response_substitution(
|
fn compute_query_response_substitution(
|
||||||
&self,
|
&self,
|
||||||
|
|
|
@ -153,7 +153,7 @@ impl<'tcx> SearchGraph<'tcx> {
|
||||||
/// coinductive cycles.
|
/// coinductive cycles.
|
||||||
///
|
///
|
||||||
/// When we encounter a coinductive cycle, we have to prove the final result of that cycle
|
/// When we encounter a coinductive cycle, we have to prove the final result of that cycle
|
||||||
/// while we are still computing that result. Because of this we continously recompute the
|
/// while we are still computing that result. Because of this we continuously recompute the
|
||||||
/// cycle until the result of the previous iteration is equal to the final result, at which
|
/// cycle until the result of the previous iteration is equal to the final result, at which
|
||||||
/// point we are done.
|
/// point we are done.
|
||||||
///
|
///
|
||||||
|
|
|
@ -45,7 +45,7 @@ impl OverflowData {
|
||||||
/// Updating the current limit when hitting overflow.
|
/// Updating the current limit when hitting overflow.
|
||||||
fn deal_with_overflow(&mut self) {
|
fn deal_with_overflow(&mut self) {
|
||||||
// When first hitting overflow we reduce the overflow limit
|
// When first hitting overflow we reduce the overflow limit
|
||||||
// for all future goals to prevent hangs if there's an exponental
|
// for all future goals to prevent hangs if there's an exponential
|
||||||
// blowup.
|
// blowup.
|
||||||
self.current_limit.0 = self.default_limit.0 / 8;
|
self.current_limit.0 = self.default_limit.0 / 8;
|
||||||
}
|
}
|
||||||
|
|
|
@ -294,7 +294,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Keep this funtion in sync with extract_tupled_inputs_and_output_from_callable
|
// Keep this function in sync with extract_tupled_inputs_and_output_from_callable
|
||||||
// until the old solver (and thus this function) is removed.
|
// until the old solver (and thus this function) is removed.
|
||||||
|
|
||||||
// Okay to skip binder because what we are inspecting doesn't involve bound regions.
|
// Okay to skip binder because what we are inspecting doesn't involve bound regions.
|
||||||
|
@ -406,7 +406,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
}
|
}
|
||||||
|
|
||||||
match obligation.self_ty().skip_binder().kind() {
|
match obligation.self_ty().skip_binder().kind() {
|
||||||
// Fast path to avoid evaluating an obligation that trivally holds.
|
// Fast path to avoid evaluating an obligation that trivially holds.
|
||||||
// There may be more bounds, but these are checked by the regular path.
|
// There may be more bounds, but these are checked by the regular path.
|
||||||
ty::FnPtr(..) => return false,
|
ty::FnPtr(..) => return false,
|
||||||
// These may potentially implement `FnPtr`
|
// These may potentially implement `FnPtr`
|
||||||
|
|
|
@ -12,7 +12,7 @@ ty_utils_array_not_supported = array construction is not supported in generic co
|
||||||
|
|
||||||
ty_utils_block_not_supported = blocks are not supported in generic constants
|
ty_utils_block_not_supported = blocks are not supported in generic constants
|
||||||
|
|
||||||
ty_utils_never_to_any_not_supported = converting nevers to any is not supported in generic constants
|
ty_utils_never_to_any_not_supported = coercing the `never` type is not supported in generic constants
|
||||||
|
|
||||||
ty_utils_tuple_not_supported = tuple construction is not supported in generic constants
|
ty_utils_tuple_not_supported = tuple construction is not supported in generic constants
|
||||||
|
|
||||||
|
@ -54,4 +54,4 @@ ty_utils_multiple_array_fields_simd_type = monomorphising SIMD type `{$ty}` with
|
||||||
|
|
||||||
ty_utils_oversized_simd_type = monomorphising SIMD type `{$ty}` of length greater than {$max_lanes}
|
ty_utils_oversized_simd_type = monomorphising SIMD type `{$ty}` of length greater than {$max_lanes}
|
||||||
|
|
||||||
ty_utils_non_primative_simd_type = monomorphising SIMD type `{$ty}` with a non-primitive-scalar (integer/float/pointer) element type `{$e_ty}`
|
ty_utils_non_primitive_simd_type = monomorphising SIMD type `{$ty}` with a non-primitive-scalar (integer/float/pointer) element type `{$e_ty}`
|
||||||
|
|
|
@ -95,7 +95,7 @@ pub struct OversizedSimdType<'tcx> {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Diagnostic)]
|
#[derive(Diagnostic)]
|
||||||
#[diag(ty_utils_non_primative_simd_type)]
|
#[diag(ty_utils_non_primitive_simd_type)]
|
||||||
pub struct NonPrimitiveSimdType<'tcx> {
|
pub struct NonPrimitiveSimdType<'tcx> {
|
||||||
pub ty: Ty<'tcx>,
|
pub ty: Ty<'tcx>,
|
||||||
pub e_ty: Ty<'tcx>,
|
pub e_ty: Ty<'tcx>,
|
||||||
|
|
|
@ -322,7 +322,7 @@ fn layout_of_uncached<'tcx>(
|
||||||
if fi.ty(tcx, substs) != f0_ty {
|
if fi.ty(tcx, substs) != f0_ty {
|
||||||
tcx.sess.delay_span_bug(
|
tcx.sess.delay_span_bug(
|
||||||
DUMMY_SP,
|
DUMMY_SP,
|
||||||
"#[repr(simd)] was applied to an ADT with hetrogeneous field type",
|
"#[repr(simd)] was applied to an ADT with heterogeneous field type",
|
||||||
);
|
);
|
||||||
return Err(LayoutError::Unknown(ty));
|
return Err(LayoutError::Unknown(ty));
|
||||||
}
|
}
|
||||||
|
|
|
@ -83,7 +83,7 @@ pub trait CollectAndApply<T, R>: Sized {
|
||||||
/// Produce a result of type `Self::Output` from `iter`. The result will
|
/// Produce a result of type `Self::Output` from `iter`. The result will
|
||||||
/// typically be produced by applying `f` on the elements produced by
|
/// typically be produced by applying `f` on the elements produced by
|
||||||
/// `iter`, though this may not happen in some impls, e.g. if an error
|
/// `iter`, though this may not happen in some impls, e.g. if an error
|
||||||
/// occured during iteration.
|
/// occurred during iteration.
|
||||||
fn collect_and_apply<I, F>(iter: I, f: F) -> Self::Output
|
fn collect_and_apply<I, F>(iter: I, f: F) -> Self::Output
|
||||||
where
|
where
|
||||||
I: Iterator<Item = Self>,
|
I: Iterator<Item = Self>,
|
||||||
|
|
|
@ -384,6 +384,7 @@ img {
|
||||||
font-size: 0.875rem;
|
font-size: 0.875rem;
|
||||||
flex: 0 0 200px;
|
flex: 0 0 200px;
|
||||||
overflow-y: scroll;
|
overflow-y: scroll;
|
||||||
|
overscroll-behavior: contain;
|
||||||
position: sticky;
|
position: sticky;
|
||||||
height: 100vh;
|
height: 100vh;
|
||||||
top: 0;
|
top: 0;
|
||||||
|
@ -1531,7 +1532,7 @@ However, it's not needed with smaller screen width because the doc/code block is
|
||||||
/*
|
/*
|
||||||
WARNING: RUSTDOC_MOBILE_BREAKPOINT MEDIA QUERY
|
WARNING: RUSTDOC_MOBILE_BREAKPOINT MEDIA QUERY
|
||||||
If you update this line, then you also need to update the line with the same warning
|
If you update this line, then you also need to update the line with the same warning
|
||||||
in main.js
|
in source-script.js
|
||||||
*/
|
*/
|
||||||
@media (max-width: 700px) {
|
@media (max-width: 700px) {
|
||||||
/* When linking to an item with an `id` (for instance, by clicking a link in the sidebar,
|
/* When linking to an item with an `id` (for instance, by clicking a link in the sidebar,
|
||||||
|
|
|
@ -8,7 +8,7 @@
|
||||||
height: 1.2rem;
|
height: 1.2rem;
|
||||||
width: 1.2rem;
|
width: 1.2rem;
|
||||||
color: inherit;
|
color: inherit;
|
||||||
border: 1px solid currentColor;
|
border: 2px solid var(--settings-input-border-color);
|
||||||
outline: none;
|
outline: none;
|
||||||
-webkit-appearance: none;
|
-webkit-appearance: none;
|
||||||
cursor: pointer;
|
cursor: pointer;
|
||||||
|
@ -52,6 +52,7 @@
|
||||||
}
|
}
|
||||||
.setting-check input:checked {
|
.setting-check input:checked {
|
||||||
background-color: var(--settings-input-color);
|
background-color: var(--settings-input-color);
|
||||||
|
border-width: 1px;
|
||||||
}
|
}
|
||||||
.setting-radio input:focus, .setting-check input:focus {
|
.setting-radio input:focus, .setting-check input:focus {
|
||||||
box-shadow: 0 0 1px 1px var(--settings-input-color);
|
box-shadow: 0 0 1px 1px var(--settings-input-color);
|
||||||
|
|
|
@ -7,6 +7,7 @@ Original by Dempfi (https://github.com/dempfi/ayu)
|
||||||
--main-background-color: #0f1419;
|
--main-background-color: #0f1419;
|
||||||
--main-color: #c5c5c5;
|
--main-color: #c5c5c5;
|
||||||
--settings-input-color: #ffb454;
|
--settings-input-color: #ffb454;
|
||||||
|
--settings-input-border-color: #999;
|
||||||
--settings-button-color: #fff;
|
--settings-button-color: #fff;
|
||||||
--settings-button-border-focus: #e0e0e0;
|
--settings-button-border-focus: #e0e0e0;
|
||||||
--sidebar-background-color: #14191f;
|
--sidebar-background-color: #14191f;
|
||||||
|
|
|
@ -2,6 +2,7 @@
|
||||||
--main-background-color: #353535;
|
--main-background-color: #353535;
|
||||||
--main-color: #ddd;
|
--main-color: #ddd;
|
||||||
--settings-input-color: #2196f3;
|
--settings-input-color: #2196f3;
|
||||||
|
--settings-input-border-color: #999;
|
||||||
--settings-button-color: #000;
|
--settings-button-color: #000;
|
||||||
--settings-button-border-focus: #ffb900;
|
--settings-button-border-focus: #ffb900;
|
||||||
--sidebar-background-color: #505050;
|
--sidebar-background-color: #505050;
|
||||||
|
|
|
@ -2,6 +2,7 @@
|
||||||
--main-background-color: white;
|
--main-background-color: white;
|
||||||
--main-color: black;
|
--main-color: black;
|
||||||
--settings-input-color: #2196f3;
|
--settings-input-color: #2196f3;
|
||||||
|
--settings-input-border-color: #717171;
|
||||||
--settings-button-color: #000;
|
--settings-button-color: #000;
|
||||||
--settings-button-border-focus: #717171;
|
--settings-button-border-focus: #717171;
|
||||||
--sidebar-background-color: #F5F5F5;
|
--sidebar-background-color: #F5F5F5;
|
||||||
|
|
|
@@ -4,11 +4,6 @@
 
 "use strict";
 
-// WARNING: RUSTDOC_MOBILE_BREAKPOINT MEDIA QUERY
-// If you update this line, then you also need to update the media query with the same
-// warning in rustdoc.css
-window.RUSTDOC_MOBILE_BREAKPOINT = 700;
-
 // Given a basename (e.g. "storage") and an extension (e.g. ".js"), return a URL
 // for a resource under the root-path, with the resource-suffix.
 function resourcePath(basename, extension) {

@@ -730,65 +725,18 @@ function preLoadCss(cssUrl) {
         window.rustdoc_add_line_numbers_to_examples();
     }
 
-    let oldSidebarScrollPosition = null;
-
-    // Scroll locking used both here and in source-script.js
-    window.rustdocMobileScrollLock = function() {
-        const mobile_topbar = document.querySelector(".mobile-topbar");
-        if (window.innerWidth <= window.RUSTDOC_MOBILE_BREAKPOINT) {
-            // This is to keep the scroll position on mobile.
-            oldSidebarScrollPosition = window.scrollY;
-            document.body.style.width = `${document.body.offsetWidth}px`;
-            document.body.style.position = "fixed";
-            document.body.style.top = `-${oldSidebarScrollPosition}px`;
-            if (mobile_topbar) {
-                mobile_topbar.style.top = `${oldSidebarScrollPosition}px`;
-                mobile_topbar.style.position = "relative";
-            }
-        } else {
-            oldSidebarScrollPosition = null;
-        }
-    };
-
-    window.rustdocMobileScrollUnlock = function() {
-        const mobile_topbar = document.querySelector(".mobile-topbar");
-        if (oldSidebarScrollPosition !== null) {
-            // This is to keep the scroll position on mobile.
-            document.body.style.width = "";
-            document.body.style.position = "";
-            document.body.style.top = "";
-            if (mobile_topbar) {
-                mobile_topbar.style.top = "";
-                mobile_topbar.style.position = "";
-            }
-            // The scroll position is lost when resetting the style, hence why we store it in
-            // `oldSidebarScrollPosition`.
-            window.scrollTo(0, oldSidebarScrollPosition);
-            oldSidebarScrollPosition = null;
-        }
-    };
-
     function showSidebar() {
         window.hideAllModals(false);
-        window.rustdocMobileScrollLock();
         const sidebar = document.getElementsByClassName("sidebar")[0];
         addClass(sidebar, "shown");
     }
 
     function hideSidebar() {
-        window.rustdocMobileScrollUnlock();
         const sidebar = document.getElementsByClassName("sidebar")[0];
         removeClass(sidebar, "shown");
     }
 
     window.addEventListener("resize", () => {
-        if (window.innerWidth > window.RUSTDOC_MOBILE_BREAKPOINT &&
-            oldSidebarScrollPosition !== null) {
-            // If the user opens the sidebar in "mobile" mode, and then grows the browser window,
-            // we need to switch away from mobile mode and make the main content area scrollable.
-            hideSidebar();
-        }
         if (window.CURRENT_TOOLTIP_ELEMENT) {
             // As a workaround to the behavior of `contains: layout` used in doc togglers,
             // tooltip popovers are positioned using javascript.
@@ -15,8 +15,13 @@ const NAME_OFFSET = 0;
 const DIRS_OFFSET = 1;
 const FILES_OFFSET = 2;
 
+// WARNING: RUSTDOC_MOBILE_BREAKPOINT MEDIA QUERY
+// If you update this line, then you also need to update the media query with the same
+// warning in rustdoc.css
+const RUSTDOC_MOBILE_BREAKPOINT = 700;
+
 function closeSidebarIfMobile() {
-    if (window.innerWidth < window.RUSTDOC_MOBILE_BREAKPOINT) {
+    if (window.innerWidth < RUSTDOC_MOBILE_BREAKPOINT) {
         updateLocalStorage("source-sidebar-show", "false");
     }
 }

@@ -69,12 +74,10 @@ function createDirEntry(elem, parent, fullPath, hasFoundFile) {
 function toggleSidebar() {
     const child = this.parentNode.children[0];
     if (child.innerText === ">") {
-        window.rustdocMobileScrollLock();
         addClass(document.documentElement, "source-sidebar-expanded");
         child.innerText = "<";
         updateLocalStorage("source-sidebar-show", "true");
     } else {
-        window.rustdocMobileScrollUnlock();
         removeClass(document.documentElement, "source-sidebar-expanded");
         child.innerText = ">";
         updateLocalStorage("source-sidebar-show", "false");
@@ -15,6 +15,7 @@
 #![feature(type_ascription)]
 #![feature(iter_intersperse)]
 #![feature(type_alias_impl_trait)]
+#![cfg_attr(not(bootstrap), feature(impl_trait_in_assoc_type))]
 #![recursion_limit = "256"]
 #![warn(rustc::internal)]
 #![allow(clippy::collapsible_if, clippy::collapsible_else_if)]
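The gate added above is the one split out for `impl Trait` used as the value of an associated type. A minimal sketch of the kind of code it governs (not taken from this diff; the trait and type names are illustrative, and it needs a nightly toolchain with the gate enabled):

#![feature(impl_trait_in_assoc_type)]

trait Numbers {
    type Iter: Iterator<Item = u32>;
    fn numbers(&self) -> Self::Iter;
}

struct Evens;

impl Numbers for Evens {
    // The associated type is an opaque `impl Trait`; its concrete type is
    // inferred from the defining use in `numbers` below.
    type Iter = impl Iterator<Item = u32>;

    fn numbers(&self) -> Self::Iter {
        (0u32..5).map(|n| n * 2)
    }
}

fn main() {
    assert_eq!(Evens.numbers().collect::<Vec<_>>(), vec![0, 2, 4, 6, 8]);
}

Previously this pattern rode on the broader `type_alias_impl_trait` gate; the `cfg_attr(not(bootstrap), ...)` wrapper is only needed until the bootstrap compiler knows the new feature name.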
@@ -9,7 +9,7 @@ use std::path::{Path, PathBuf};
 
 // FIXME: The following limits should be reduced eventually.
 const ENTRY_LIMIT: usize = 885;
-const ROOT_ENTRY_LIMIT: usize = 881;
+const ROOT_ENTRY_LIMIT: usize = 880;
 const ISSUES_ENTRY_LIMIT: usize = 1978;
 
 fn check_entries(tests_path: &Path, bad: &mut bool) {

@@ -22,18 +22,19 @@ fn check_entries(tests_path: &Path, bad: &mut bool) {
         }
     }
 
+    let (mut max, mut max_root, mut max_issues) = (0usize, 0usize, 0usize);
     for (dir_path, count) in directories {
         // Use special values for these dirs.
         let is_root = tests_path.join("ui") == dir_path;
         let is_issues_dir = tests_path.join("ui/issues") == dir_path;
-        let limit = if is_root {
-            ROOT_ENTRY_LIMIT
+        let (limit, maxcnt) = if is_root {
+            (ROOT_ENTRY_LIMIT, &mut max_root)
         } else if is_issues_dir {
-            ISSUES_ENTRY_LIMIT
+            (ISSUES_ENTRY_LIMIT, &mut max_issues)
         } else {
-            ENTRY_LIMIT
+            (ENTRY_LIMIT, &mut max)
         };
+        *maxcnt = (*maxcnt).max(count);
         if count > limit {
             tidy_error!(
                 bad,

@@ -45,6 +46,21 @@ fn check_entries(tests_path: &Path, bad: &mut bool) {
             );
         }
     }
+    if ENTRY_LIMIT > max {
+        tidy_error!(bad, "`ENTRY_LIMIT` is too high (is {ENTRY_LIMIT}, should be {max})");
+    }
+    if ROOT_ENTRY_LIMIT > max_root {
+        tidy_error!(
+            bad,
+            "`ROOT_ENTRY_LIMIT` is too high (is {ROOT_ENTRY_LIMIT}, should be {max_root})"
+        );
+    }
+    if ISSUES_ENTRY_LIMIT > max_issues {
+        tidy_error!(
+            bad,
+            "`ISSUES_ENTRY_LIMIT` is too high (is {ISSUES_ENTRY_LIMIT}, should be {max_issues})"
+        );
+    }
 }
 
 pub fn check(path: &Path, bad: &mut bool) {
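The new tidy check works as a ratchet: it records the largest entry count actually observed for each category and then flags a limit that sits above that maximum, so the constants can only move downward as tests are cleaned up. A standalone sketch of that pattern (simplified; `tidy_error!` and the real directory walk are replaced with a plain function and hard-coded counts):

// Sketch of a "ratchet" limit check: a limit is an error both when it is exceeded
// and when it is looser than what the data actually needs.
fn check_limit(name: &str, limit: usize, observed_counts: &[usize], bad: &mut bool) {
    let max_seen = observed_counts.iter().copied().max().unwrap_or(0);
    if max_seen > limit {
        *bad = true;
        eprintln!("`{name}` exceeded: {max_seen} entries, limit is {limit}");
    } else if limit > max_seen {
        *bad = true;
        eprintln!("`{name}` is too high (is {limit}, should be {max_seen})");
    }
}

fn main() {
    let mut bad = false;
    // Hypothetical entry count for a single directory in one category.
    check_limit("ROOT_ENTRY_LIMIT", 881, &[880], &mut bad);
    assert!(bad); // 881 is looser than the observed maximum of 880, so the check complains.
}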
@@ -56,11 +56,12 @@ move-cursor-to: "#settings-menu > a"
 assert-css: (
     "#theme-dark",
     {
-        "border-color": "rgb(221, 221, 221)",
+        "border-color": "rgb(153, 153, 153)",
         "box-shadow": "rgb(53, 53, 53) 0px 0px 0px 3px inset",
+        "border-width": "2px",
     },
 )
-assert-css: ("#theme-light", {"border-color": "rgb(221, 221, 221)", "box-shadow": "none"})
+assert-css: ("#theme-light", {"border-color": "rgb(153, 153, 153)", "box-shadow": "none"})
 // Let's start with the hover for radio buttons.
 move-cursor-to: "#theme-dark"
 assert-css: (

@@ -68,26 +69,36 @@ assert-css: (
     {
         "border-color": "rgb(33, 150, 243)",
         "box-shadow": "rgb(53, 53, 53) 0px 0px 0px 3px inset",
+        "border-width": "2px",
     },
 )
 move-cursor-to: "#theme-light"
-assert-css: ("#theme-light", {"border-color": "rgb(33, 150, 243)", "box-shadow": "none"})
+assert-css: (
+    "#theme-light",
+    {
+        "border-color": "rgb(33, 150, 243)",
+        "box-shadow": "none",
+        "border-width": "2px",
+    }
+)
 move-cursor-to: "#theme-ayu"
 // Let's now check with the focus for radio buttons.
 focus: "#theme-dark"
 assert-css: (
     "#theme-dark",
     {
-        "border-color": "rgb(221, 221, 221)",
+        "border-color": "rgb(153, 153, 153)",
         "box-shadow": "rgb(53, 53, 53) 0px 0px 0px 3px inset, rgb(33, 150, 243) 0px 0px 2px 2px",
+        "border-width": "2px",
     },
 )
 focus: "#theme-light"
 assert-css: (
     "#theme-light",
     {
-        "border-color": "rgb(221, 221, 221)",
+        "border-color": "rgb(153, 153, 153)",
         "box-shadow": "rgb(33, 150, 243) 0px 0px 1px 1px",
+        "border-width": "2px",
     },
 )
 // Now we check we both focus and hover for radio buttons.

@@ -98,6 +109,7 @@ assert-css: (
     {
         "border-color": "rgb(33, 150, 243)",
         "box-shadow": "rgb(53, 53, 53) 0px 0px 0px 3px inset, rgb(33, 150, 243) 0px 0px 2px 2px",
+        "border-width": "2px",
     },
 )
 move-cursor-to: "#theme-light"

@@ -107,6 +119,7 @@ assert-css: (
     {
         "border-color": "rgb(33, 150, 243)",
         "box-shadow": "rgb(33, 150, 243) 0px 0px 1px 1px",
+        "border-width": "2px",
     },
 )
 // Now we check the setting-radio-name is on a different line than the label.

@@ -142,7 +155,18 @@ assert-css: (
     "#auto-hide-large-items",
     {
         "background-color": "rgb(33, 150, 243)",
-        "border-color": "rgb(221, 221, 221)",
+        "border-color": "rgb(153, 153, 153)",
+        // 1px border when checked
+        "border-width": "1px",
+    },
+)
+assert-css: (
+    "#auto-hide-method-docs",
+    {
+        "background-color": "rgba(0, 0, 0, 0)",
+        "border-color": "rgb(153, 153, 153)",
+        // 2px border when unchecked
+        "border-width": "2px",
     },
 )
 // Let's start with the hover for toggles.

@@ -152,6 +176,18 @@ assert-css: (
     {
         "background-color": "rgb(33, 150, 243)",
         "border-color": "rgb(33, 150, 243)",
+        // 1px border when checked
+        "border-width": "1px",
+    },
+)
+move-cursor-to: "#auto-hide-method-docs"
+assert-css: (
+    "#auto-hide-method-docs",
+    {
+        "background-color": "rgba(0, 0, 0, 0)",
+        "border-color": "rgb(33, 150, 243)",
+        // 2px border when unchecked
+        "border-width": "2px",
     },
 )
 move-cursor-to: "#settings-menu > a"

@@ -161,8 +197,21 @@ assert-css: (
     "#auto-hide-large-items",
     {
         "background-color": "rgb(33, 150, 243)",
-        "border-color": "rgb(221, 221, 221)",
+        "border-color": "rgb(153, 153, 153)",
         "box-shadow": "rgb(33, 150, 243) 0px 0px 1px 1px",
+        // 1px border when checked
+        "border-width": "1px",
+    },
+)
+focus: "#auto-hide-method-docs"
+assert-css: (
+    "#auto-hide-method-docs",
+    {
+        "background-color": "rgba(0, 0, 0, 0)",
+        "border-color": "rgb(153, 153, 153)",
+        "box-shadow": "rgb(33, 150, 243) 0px 0px 1px 1px",
+        // 2px border when unchecked
+        "border-width": "2px",
     },
 )
 // Now we check we both focus and hover for toggles.

@@ -174,6 +223,20 @@ assert-css: (
         "background-color": "rgb(33, 150, 243)",
         "border-color": "rgb(33, 150, 243)",
         "box-shadow": "rgb(33, 150, 243) 0px 0px 1px 1px",
+        // 1px border when checked
+        "border-width": "1px",
+    },
+)
+move-cursor-to: "#auto-hide-method-docs"
+focus: "#auto-hide-method-docs"
+assert-css: (
+    "#auto-hide-method-docs",
+    {
+        "background-color": "rgba(0, 0, 0, 0)",
+        "border-color": "rgb(33, 150, 243)",
+        "box-shadow": "rgb(33, 150, 243) 0px 0px 1px 1px",
+        // 2px border when unchecked
+        "border-width": "2px",
     },
 )
@@ -1,31 +1,12 @@
-// This test ensures that the mobile sidebar preserves scroll position.
+// This test ensures that the mobile disables scrolling the page.
 go-to: "file://" + |DOC_PATH| + "/test_docs/struct.Foo.html"
-// Switching to "mobile view" by reducing the width to 600px.
-set-window-size: (700, 600)
-assert-css: (".sidebar", {"display": "block", "left": "-1000px"})
+set-window-size: (1280, 800) // desktop
+assert-css: (".sidebar", {"overscroll-behavior": "contain"})
+set-window-size: (700, 600) // mobile
+assert-css: (".sidebar", {"overscroll-behavior": "contain"})
 
-// Scroll down.
-scroll-to: "//h2[@id='blanket-implementations']"
-assert-window-property: {"pageYOffset": "622"}
-
-// Open the sidebar menu.
-click: ".sidebar-menu-toggle"
-wait-for-css: (".sidebar", {"left": "0px"})
-
-// We are no longer "scrolled". It's important that the user can't
-// scroll the body at all, but these test scripts are run only in Chrome,
-// and we need to use a more complicated solution to this problem because
-// of Mobile Safari...
-assert-window-property: {"pageYOffset": "0"}
-
-// Close the sidebar menu. Make sure the scroll position gets restored.
-click: ".sidebar-menu-toggle"
-wait-for-css: (".sidebar", {"left": "-1000px"})
-assert-window-property: {"pageYOffset": "622"}
-
-// Now test that scrollability returns when the browser window is just resized.
-click: ".sidebar-menu-toggle"
-wait-for-css: (".sidebar", {"left": "0px"})
-assert-window-property: {"pageYOffset": "0"}
-set-window-size: (900, 600)
-assert-window-property: {"pageYOffset": "622"}
+go-to: "file://" + |DOC_PATH| + "/src/test_docs/lib.rs.html"
+set-window-size: (1280, 800) // desktop
+assert-css: (".sidebar", {"overscroll-behavior": "contain"})
+set-window-size: (700, 600) // mobile
+assert-css: (".sidebar", {"overscroll-behavior": "contain"})
@@ -183,22 +183,12 @@ wait-for-css: (".sidebar", {"left": "-1000px"})
 // The "scrollTop" property should be the same.
 assert-window-property: {"pageYOffset": "2542"}
 
-// We now check that the scroll position is restored if the window is resized.
-set-window-size: (500, 700)
-click: "#src-sidebar-toggle"
-wait-for-css: ("#source-sidebar", {"visibility": "visible"})
-assert-window-property: {"pageYOffset": "0"}
-set-window-size: (900, 900)
-assert-window-property: {"pageYOffset": "2542"}
-set-window-size: (500, 700)
-click: "#src-sidebar-toggle"
-wait-for-css: ("#source-sidebar", {"visibility": "hidden"})
-
 // We now check that opening the sidebar and clicking a link will close it.
 // The behavior here on mobile is different than the behavior on desktop,
 // but common sense dictates that if you have a list of files that fills the entire screen, and
 // you click one of them, you probably want to actually see the file's contents, and not just
 // make it the current selection.
+set-window-size: (500, 700)
 click: "#src-sidebar-toggle"
 wait-for-css: ("#source-sidebar", {"visibility": "visible"})
 assert-local-storage: {"rustdoc-source-sidebar-show": "true"}
@@ -1,6 +1,6 @@
 //edition:2018
 
-#![feature(type_alias_impl_trait)]
+#![feature(impl_trait_in_assoc_type)]
 
 pub trait Foo {
     type X: std::future::Future<Output = ()>;
@@ -12,11 +12,11 @@ fn foo<T: Trait<method(i32): Send>>() {}
 //~^ ERROR argument types not allowed with return type notation
 //~| ERROR associated type bounds are unstable
 
-fn bar<T: Trait<method(..) -> (): Send>>() {}
+fn bar<T: Trait<method() -> (): Send>>() {}
 //~^ ERROR return type not allowed with return type notation
-
-fn baz<T: Trait<method(): Send>>() {}
-//~^ ERROR return type notation arguments must be elided with `..`
 //~| ERROR associated type bounds are unstable
 
+fn baz<T: Trait<method(..): Send>>() {}
+//~^ ERROR return type notation uses `()` instead of `(..)` for elided arguments
+
 fn main() {}
@@ -1,8 +1,8 @@
-error: return type not allowed with return type notation
-  --> $DIR/bad-inputs-and-output.rs:15:28
+error: return type notation uses `()` instead of `(..)` for elided arguments
+  --> $DIR/bad-inputs-and-output.rs:19:24
    |
-LL | fn bar<T: Trait<method(..) -> (): Send>>() {}
-   |                            ^^^^^ help: remove the return type
+LL | fn baz<T: Trait<method(..): Send>>() {}
+   |                        ^^ help: remove the `..`
 
 error[E0658]: associated type bounds are unstable
   --> $DIR/bad-inputs-and-output.rs:11:17

@@ -14,10 +14,10 @@ LL | fn foo<T: Trait<method(i32): Send>>() {}
    = help: add `#![feature(associated_type_bounds)]` to the crate attributes to enable
 
 error[E0658]: associated type bounds are unstable
-  --> $DIR/bad-inputs-and-output.rs:18:17
+  --> $DIR/bad-inputs-and-output.rs:15:17
    |
-LL | fn baz<T: Trait<method(): Send>>() {}
-   |                 ^^^^^^^^^^^^^^
+LL | fn bar<T: Trait<method() -> (): Send>>() {}
+   |                 ^^^^^^^^^^^^^^^^^^^^
    |
    = note: see issue #52662 <https://github.com/rust-lang/rust/issues/52662> for more information
    = help: add `#![feature(associated_type_bounds)]` to the crate attributes to enable

@@ -43,13 +43,13 @@ error: argument types not allowed with return type notation
   --> $DIR/bad-inputs-and-output.rs:11:23
    |
 LL | fn foo<T: Trait<method(i32): Send>>() {}
-   |                       ^^^^^ help: remove the input types: `(..)`
+   |                       ^^^^^ help: remove the input types: `()`
 
-error: return type notation arguments must be elided with `..`
-  --> $DIR/bad-inputs-and-output.rs:18:23
+error: return type not allowed with return type notation
+  --> $DIR/bad-inputs-and-output.rs:15:25
    |
-LL | fn baz<T: Trait<method(): Send>>() {}
-   |                       ^^ help: add `..`: `(..)`
+LL | fn bar<T: Trait<method() -> (): Send>>() {}
+   |                         ^^^^^^ help: remove the return type
 
 error: aborting due to 5 previous errors; 2 warnings emitted
@@ -18,7 +18,7 @@ async fn foo<T: Foo>() -> Result<(), ()> {
 fn is_send(_: impl Send) {}
 
 fn test<
-    #[cfg(with)] T: Foo<method(..): Send>,
+    #[cfg(with)] T: Foo<method(): Send>,
     #[cfg(without)] T: Foo,
 >() {
     is_send(foo::<T>());
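For orientation, a sketch of how the updated notation is meant to read, closely mirroring the test above (nightly-only; the feature gate names are assumptions not shown in this hunk, and the syntax is still in flux): `method(): Send`, which previously had to be written `method(..): Send`, bounds the future returned by the trait method so callers can prove a generic async call is `Send` without naming the opaque return type.

#![allow(incomplete_features)]
#![feature(async_fn_in_trait, return_type_notation)] // assumed gate names

trait Foo {
    async fn method() -> Result<(), ()>;
}

async fn call<T: Foo>() -> Result<(), ()> {
    T::method().await
}

fn is_send(_: impl Send) {}

// `method(): Send` promises that the future returned by `T::method` is `Send`,
// which in turn makes the future returned by `call::<T>()` `Send`.
fn test<T: Foo<method(): Send>>() {
    is_send(call::<T>());
}

fn main() {}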
Some files were not shown because too many files have changed in this diff.