Auto merge of #87118 - JohnTitor:rollup-8ltidsq, r=JohnTitor
Rollup of 6 pull requests

Successful merges:
 - #87085 (Search result colors)
 - #87090 (Make BTreeSet::split_off name elements like other set methods do)
 - #87098 (Unignore some pretty printing tests)
 - #87099 (Upgrade `cc` crate to 1.0.69)
 - #87101 (Suggest a path separator if a stray colon is found in a match arm)
 - #87102 (Add GUI test for "go to first" feature)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
Commit: ee5ed4a88d
24 changed files with 296 additions and 39 deletions
@@ -435,9 +435,9 @@ version = "0.1.0"
 
 [[package]]
 name = "cc"
-version = "1.0.68"
+version = "1.0.69"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4a72c244c1ff497a746a7e1fb3d14bd08420ecda70c8f25c7112f2781652d787"
+checksum = "e70cc2f62c6ce1868963827bd677764c62d07c3d9a3e1fb1177ee1a9ab199eb2"
 dependencies = [
  "jobserver",
 ]
@@ -9,7 +9,7 @@ test = false
 
 [dependencies]
 bitflags = "1.2.1"
-cc = "1.0.68"
+cc = "1.0.69"
 itertools = "0.9"
 tracing = "0.1"
 libc = "0.2.50"
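Note (not part of this diff): the bumped `cc` crate wraps a system C/C++ compiler and is most commonly driven from a `build.rs` script. A minimal sketch of such a build script follows; the source path and library name are made up for illustration.

// build.rs (illustrative sketch; `src/native/helper.c` is a hypothetical path)
fn main() {
    // Compile one C file into a static library named `helper` and link it in.
    cc::Build::new()
        .file("src/native/helper.c")
        .compile("helper");
    // Rebuild only when the C source changes.
    println!("cargo:rerun-if-changed=src/native/helper.c");
}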
@@ -22,7 +22,9 @@ use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Applicability, FatalError, PResult};
 use rustc_feature::Features;
-use rustc_parse::parser::{AttemptLocalParseRecovery, ForceCollect, Parser, RecoverComma};
+use rustc_parse::parser::{
+    AttemptLocalParseRecovery, ForceCollect, Parser, RecoverColon, RecoverComma,
+};
 use rustc_parse::validate_attr;
 use rustc_session::lint::builtin::UNUSED_DOC_COMMENTS;
 use rustc_session::lint::BuiltinLintDiagnostics;
@@ -930,9 +932,11 @@ pub fn parse_ast_fragment<'a>(
             }
         }
         AstFragmentKind::Ty => AstFragment::Ty(this.parse_ty()?),
-        AstFragmentKind::Pat => {
-            AstFragment::Pat(this.parse_pat_allow_top_alt(None, RecoverComma::No)?)
-        }
+        AstFragmentKind::Pat => AstFragment::Pat(this.parse_pat_allow_top_alt(
+            None,
+            RecoverComma::No,
+            RecoverColon::Yes,
+        )?),
         AstFragmentKind::Arms
         | AstFragmentKind::Fields
         | AstFragmentKind::FieldPats
@@ -13,4 +13,4 @@ libc = "0.2.73"
 
 [build-dependencies]
 build_helper = { path = "../../src/build_helper" }
-cc = "1.0.68"
+cc = "1.0.69"
@@ -1,4 +1,4 @@
-use super::pat::{RecoverComma, PARAM_EXPECTED};
+use super::pat::{RecoverColon, RecoverComma, PARAM_EXPECTED};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{AttrWrapper, BlockMode, ForceCollect, Parser, PathStyle, Restrictions, TokenType};
 use super::{SemiColonMode, SeqSep, TokenExpectType, TrailingToken};
@@ -1813,7 +1813,7 @@ impl<'a> Parser<'a> {
     /// The `let` token has already been eaten.
     fn parse_let_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
         let lo = self.prev_token.span;
-        let pat = self.parse_pat_allow_top_alt(None, RecoverComma::Yes)?;
+        let pat = self.parse_pat_allow_top_alt(None, RecoverComma::Yes, RecoverColon::Yes)?;
         self.expect(&token::Eq)?;
         let expr = self.with_res(self.restrictions | Restrictions::NO_STRUCT_LITERAL, |this| {
             this.parse_assoc_expr_with(1 + prec_let_scrutinee_needs_par(), None.into())
@@ -1876,7 +1876,7 @@ impl<'a> Parser<'a> {
             _ => None,
         };
 
-        let pat = self.parse_pat_allow_top_alt(None, RecoverComma::Yes)?;
+        let pat = self.parse_pat_allow_top_alt(None, RecoverComma::Yes, RecoverColon::Yes)?;
         if !self.eat_keyword(kw::In) {
             self.error_missing_in_for_loop();
         }
@@ -2083,7 +2083,7 @@ impl<'a> Parser<'a> {
         let attrs = self.parse_outer_attributes()?;
         self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
             let lo = this.token.span;
-            let pat = this.parse_pat_allow_top_alt(None, RecoverComma::Yes)?;
+            let pat = this.parse_pat_allow_top_alt(None, RecoverComma::Yes, RecoverColon::Yes)?;
             let guard = if this.eat_keyword(kw::If) {
                 let if_span = this.prev_token.span;
                 let cond = this.parse_expr()?;
@@ -14,7 +14,7 @@ use crate::lexer::UnmatchedBrace;
 pub use attr_wrapper::AttrWrapper;
 pub use diagnostics::AttemptLocalParseRecovery;
 use diagnostics::Error;
-pub use pat::RecoverComma;
+pub use pat::{RecoverColon, RecoverComma};
 pub use path::PathStyle;
 
 use rustc_ast::ptr::P;
@@ -5,7 +5,7 @@ use rustc_ast_pretty::pprust;
 use rustc_errors::PResult;
 use rustc_span::symbol::{kw, Ident};
 
-use crate::parser::pat::RecoverComma;
+use crate::parser::pat::{RecoverColon, RecoverComma};
 use crate::parser::{FollowedByType, ForceCollect, Parser, PathStyle};
 
 impl<'a> Parser<'a> {
@@ -125,7 +125,7 @@ impl<'a> Parser<'a> {
             token::NtPat(self.collect_tokens_no_attrs(|this| match kind {
                 NonterminalKind::PatParam { .. } => this.parse_pat_no_top_alt(None),
                 NonterminalKind::PatWithOr { .. } => {
-                    this.parse_pat_allow_top_alt(None, RecoverComma::No)
+                    this.parse_pat_allow_top_alt(None, RecoverComma::No, RecoverColon::No)
                 }
                 _ => unreachable!(),
             })?)
@@ -24,6 +24,13 @@ pub enum RecoverComma {
     No,
 }
 
+/// Whether or not to recover a `:` when parsing patterns that were meant to be paths.
+#[derive(PartialEq, Copy, Clone)]
+pub enum RecoverColon {
+    Yes,
+    No,
+}
+
 /// The result of `eat_or_separator`. We want to distinguish which case we are in to avoid
 /// emitting duplicate diagnostics.
 #[derive(Debug, Clone, Copy)]
@@ -58,8 +65,9 @@ impl<'a> Parser<'a> {
         &mut self,
         expected: Expected,
         rc: RecoverComma,
+        ra: RecoverColon,
     ) -> PResult<'a, P<Pat>> {
-        self.parse_pat_allow_top_alt_inner(expected, rc).map(|(pat, _)| pat)
+        self.parse_pat_allow_top_alt_inner(expected, rc, ra).map(|(pat, _)| pat)
     }
 
     /// Returns the pattern and a bool indicating whether we recovered from a trailing vert (true =
@@ -68,6 +76,7 @@ impl<'a> Parser<'a> {
         &mut self,
         expected: Expected,
         rc: RecoverComma,
+        ra: RecoverColon,
     ) -> PResult<'a, (P<Pat>, bool)> {
         // Keep track of whether we recovered from a trailing vert so that we can avoid duplicated
         // suggestions (which bothers rustfix).
@@ -89,6 +98,56 @@ impl<'a> Parser<'a> {
         // If we parsed a leading `|` which should be gated,
         // then we should really gate the leading `|`.
         // This complicated procedure is done purely for diagnostics UX.
+        let mut first_pat = first_pat;
+
+        if let (RecoverColon::Yes, token::Colon) = (ra, &self.token.kind) {
+            if matches!(
+                first_pat.kind,
+                PatKind::Ident(BindingMode::ByValue(Mutability::Not), _, None)
+                    | PatKind::Path(..)
+            ) && self.look_ahead(1, |token| token.is_ident() && !token.is_reserved_ident())
+            {
+                // The pattern looks like it might be a path with a `::` -> `:` typo:
+                // `match foo { bar:baz => {} }`
+                let span = self.token.span;
+                // We only emit "unexpected `:`" error here if we can successfully parse the
+                // whole pattern correctly in that case.
+                let snapshot = self.clone();
+
+                // Create error for "unexpected `:`".
+                match self.expected_one_of_not_found(&[], &[]) {
+                    Err(mut err) => {
+                        self.bump(); // Skip the `:`.
+                        match self.parse_pat_no_top_alt(expected) {
+                            Err(mut inner_err) => {
+                                // Carry on as if we had not done anything, callers will emit a
+                                // reasonable error.
+                                inner_err.cancel();
+                                err.cancel();
+                                *self = snapshot;
+                            }
+                            Ok(pat) => {
+                                // We've parsed the rest of the pattern.
+                                err.span_suggestion(
+                                    span,
+                                    "maybe write a path separator here",
+                                    "::".to_string(),
+                                    Applicability::MachineApplicable,
+                                );
+                                err.emit();
+                                first_pat =
+                                    self.mk_pat(first_pat.span.to(pat.span), PatKind::Wild);
+                            }
+                        }
+                    }
+                    _ => {
+                        // Carry on as if we had not done anything. This should be unreachable.
+                        *self = snapshot;
+                    }
+                };
+            }
+        }
+
         if let Some(leading_vert_span) = leading_vert_span {
             // If there was a leading vert, treat this as an or-pattern. This improves
             // diagnostics.
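To illustrate what this recovery produces (a sketch mirroring the new UI test added below, not part of the diff itself): a stray `:` where `::` was intended now yields a machine-applicable suggestion instead of a bare parse error.

// Erroneous input: `Foo:Bar` should be the path `Foo::Bar`.
enum Foo { Bar, Baz }

fn main() {
    match Foo::Bar {
        Foo:Bar => {}
        // error: expected one of `@` or `|`, found `:`
        // help: maybe write a path separator here: `::`
        _ => {}
    }
}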
@@ -140,7 +199,8 @@ impl<'a> Parser<'a> {
         // We use `parse_pat_allow_top_alt` regardless of whether we actually want top-level
         // or-patterns so that we can detect when a user tries to use it. This allows us to print a
         // better error message.
-        let (pat, trailing_vert) = self.parse_pat_allow_top_alt_inner(expected, rc)?;
+        let (pat, trailing_vert) =
+            self.parse_pat_allow_top_alt_inner(expected, rc, RecoverColon::No)?;
         let colon = self.eat(&token::Colon);
 
         if let PatKind::Or(pats) = &pat.kind {
@@ -350,7 +410,7 @@ impl<'a> Parser<'a> {
         } else if self.check(&token::OpenDelim(token::Bracket)) {
             // Parse `[pat, pat,...]` as a slice pattern.
             let (pats, _) = self.parse_delim_comma_seq(token::Bracket, |p| {
-                p.parse_pat_allow_top_alt(None, RecoverComma::No)
+                p.parse_pat_allow_top_alt(None, RecoverComma::No, RecoverColon::No)
             })?;
             PatKind::Slice(pats)
         } else if self.check(&token::DotDot) && !self.is_pat_range_end_start(1) {
@@ -563,8 +623,9 @@ impl<'a> Parser<'a> {
 
     /// Parse a tuple or parenthesis pattern.
     fn parse_pat_tuple_or_parens(&mut self) -> PResult<'a, PatKind> {
-        let (fields, trailing_comma) =
-            self.parse_paren_comma_seq(|p| p.parse_pat_allow_top_alt(None, RecoverComma::No))?;
+        let (fields, trailing_comma) = self.parse_paren_comma_seq(|p| {
+            p.parse_pat_allow_top_alt(None, RecoverComma::No, RecoverColon::No)
+        })?;
 
         // Here, `(pat,)` is a tuple pattern.
         // For backward compatibility, `(..)` is a tuple pattern as well.
@@ -873,8 +934,9 @@ impl<'a> Parser<'a> {
 
     /// Parse tuple struct or tuple variant pattern (e.g. `Foo(...)` or `Foo::Bar(...)`).
    fn parse_pat_tuple_struct(&mut self, qself: Option<QSelf>, path: Path) -> PResult<'a, PatKind> {
-        let (fields, _) =
-            self.parse_paren_comma_seq(|p| p.parse_pat_allow_top_alt(None, RecoverComma::No))?;
+        let (fields, _) = self.parse_paren_comma_seq(|p| {
+            p.parse_pat_allow_top_alt(None, RecoverComma::No, RecoverColon::No)
+        })?;
         if qself.is_some() {
             self.sess.gated_spans.gate(sym::more_qualified_paths, path.span);
         }
@@ -1033,7 +1095,7 @@ impl<'a> Parser<'a> {
             // Parsing a pattern of the form `fieldname: pat`.
             let fieldname = self.parse_field_name()?;
             self.bump();
-            let pat = self.parse_pat_allow_top_alt(None, RecoverComma::No)?;
+            let pat = self.parse_pat_allow_top_alt(None, RecoverComma::No, RecoverColon::No)?;
             hi = pat.span;
             (pat, fieldname, false)
         } else {
@@ -903,8 +903,8 @@ impl<T> BTreeSet<T> {
         self.map.append(&mut other.map);
     }
 
-    /// Splits the collection into two at the given key. Returns everything after the given key,
-    /// including the key.
+    /// Splits the collection into two at the given value. Returns everything after the given value,
+    /// including the value.
     ///
     /// # Examples
     ///
|
@ -933,11 +933,11 @@ impl<T> BTreeSet<T> {
|
||||||
/// assert!(b.contains(&41));
|
/// assert!(b.contains(&41));
|
||||||
/// ```
|
/// ```
|
||||||
#[stable(feature = "btree_split_off", since = "1.11.0")]
|
#[stable(feature = "btree_split_off", since = "1.11.0")]
|
||||||
pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self
|
pub fn split_off<Q: ?Sized + Ord>(&mut self, value: &Q) -> Self
|
||||||
where
|
where
|
||||||
T: Borrow<Q> + Ord,
|
T: Borrow<Q> + Ord,
|
||||||
{
|
{
|
||||||
BTreeSet { map: self.map.split_off(key) }
|
BTreeSet { map: self.map.split_off(value) }
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Creates an iterator that visits all values in ascending order and uses a closure
|
/// Creates an iterator that visits all values in ascending order and uses a closure
|
||||||
|
|
|
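For reference, the rename is purely cosmetic: sets hold values rather than keys, so the parameter and docs now say "value"; behaviour is unchanged. A minimal usage sketch of `BTreeSet::split_off`:

use std::collections::BTreeSet;

fn main() {
    let mut low: BTreeSet<i32> = (1..=5).collect();
    // Everything greater than or equal to the given value moves into `high`.
    let high = low.split_off(&3);
    assert_eq!(low.into_iter().collect::<Vec<_>>(), [1, 2]);
    assert_eq!(high.into_iter().collect::<Vec<_>>(), [3, 4, 5]);
}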
@@ -14,4 +14,4 @@ core = { path = "../core" }
 compiler_builtins = { version = "0.1.0", features = ['rustc-dep-of-std'] }
 
 [build-dependencies]
-cc = "1.0.68"
+cc = "1.0.69"
@@ -21,7 +21,7 @@ compiler_builtins = "0.1.0"
 cfg-if = "0.1.8"
 
 [build-dependencies]
-cc = "1.0.68"
+cc = "1.0.69"
 
 [features]
 
@@ -40,7 +40,7 @@ cmake = "0.1.38"
 filetime = "0.2"
 num_cpus = "1.0"
 getopts = "0.2.19"
-cc = "1.0.68"
+cc = "1.0.69"
 libc = "0.2"
 serde = { version = "1.0.8", features = ["derive"] }
 serde_json = "1.0.2"
@@ -161,7 +161,7 @@ pre, .rustdoc.source .example-wrap {
 .search-results a {
     color: #0096cf;
 }
-.search-results a span.desc {
+.search-results a div.desc {
     color: #c5c5c5;
 }
 
@@ -286,7 +286,7 @@ details.undocumented > summary::before {
     color: grey;
 }
 
-tr.result span.primitive::after, tr.result span.keyword::after {
+.result-name .primitive > i, .result-name .keyword > i {
     color: #788797;
 }
 
@@ -247,7 +247,7 @@ details.undocumented > summary::before {
     color: grey;
 }
 
-tr.result span.primitive::after, tr.result span.keyword::after {
+.result-name .primitive > i, .result-name .keyword > i {
     color: #ddd;
 }
 
@@ -237,7 +237,7 @@ details.undocumented > summary::before {
     color: grey;
 }
 
-tr.result span.primitive::after, tr.result span.keyword::after {
+.result-name .primitive > i, .result-name .keyword > i {
     color: black;
 }
 
src/test/rustdoc-gui/search-result-color.goml (new file, 41 lines)
@@ -0,0 +1,41 @@
+// The goal of this test is to ensure the color of the text is the one expected.
+goto: file://|DOC_PATH|/test_docs/index.html?search=coo
+
+// This is needed so that the text color is computed.
+show-text: true
+
+// Ayu theme
+local-storage: {"rustdoc-theme": "ayu", "rustdoc-preferred-dark-theme": "ayu", "rustdoc-use-system-theme": "false"}
+reload:
+
+// Waiting for the search results to appear...
+wait-for: "#titles"
+assert-css: ("//*[@class='desc']//*[text()='Just a normal struct.']", {"color": "rgb(197, 197, 197)"})
+assert-css: ("//*[@class='result-name']/*[text()='test_docs::']", {"color": "rgb(0, 150, 207)"})
+
+// Checking the color for "keyword".
+assert-css: ("//*[@class='result-name']//*[text()='(keyword)']", {"color": "rgb(120, 135, 151)"})
+
+// Dark theme
+local-storage: {"rustdoc-theme": "dark", "rustdoc-preferred-dark-theme": "dark", "rustdoc-use-system-theme": "false"}
+reload:
+
+// Waiting for the search results to appear...
+wait-for: "#titles"
+assert-css: ("//*[@class='desc']//*[text()='Just a normal struct.']", {"color": "rgb(221, 221, 221)"})
+assert-css: ("//*[@class='result-name']/*[text()='test_docs::']", {"color": "rgb(221, 221, 221)"})
+
+// Checking the color for "keyword".
+assert-css: ("//*[@class='result-name']//*[text()='(keyword)']", {"color": "rgb(221, 221, 221)"})
+
+// Light theme
+local-storage: {"rustdoc-theme": "light", "rustdoc-use-system-theme": "false"}
+reload:
+
+// Waiting for the search results to appear...
+wait-for: "#titles"
+assert-css: ("//*[@class='desc']//*[text()='Just a normal struct.']", {"color": "rgb(0, 0, 0)"})
+assert-css: ("//*[@class='result-name']/*[text()='test_docs::']", {"color": "rgb(0, 0, 0)"})
+
+// Checking the color for "keyword".
+assert-css: ("//*[@class='result-name']//*[text()='(keyword)']", {"color": "rgb(0, 0, 0)"})
src/test/rustdoc-gui/search-result-go-to-first.goml (new file, 20 lines)
@@ -0,0 +1,20 @@
+// This test ensures that the "go_to_first" feature is working as expected.
+
+// First, we check that the first page doesn't have the string we're looking for to ensure
+// that the feature is changing page as expected.
+goto: file://|DOC_PATH|/test_docs/index.html
+assert-text-false: (".fqn .in-band", "Struct test_docs::Foo")
+
+// We now check that we land on the search result page if "go_to_first" isn't set.
+goto: file://|DOC_PATH|/test_docs/index.html?search=struct%3AFoo
+// Waiting for the search results to appear...
+wait-for: "#titles"
+assert-text-false: (".fqn .in-band", "Struct test_docs::Foo")
+// Ensure that the search results are displayed, not the "normal" content.
+assert-css: ("#main", {"display": "none"})
+
+// Now we can check that the feature is working as expected!
+goto: file://|DOC_PATH|/test_docs/index.html?search=struct%3AFoo&go_to_first=true
+// Waiting for the page to load...
+wait-for: 500
+assert-text: (".fqn .in-band", "Struct test_docs::Foo")
@@ -101,6 +101,7 @@ pub enum AnEnum {
 }
 
 #[doc(keyword = "CookieMonster")]
+/// Some keyword.
 pub mod keyword {}
 
 /// Just some type alias.
@@ -7,7 +7,6 @@
 #![allow(deprecated, deprecated_in_future)]
 
 // aux-build:i8.rs
-// ignore-pretty issue #37201
 
 extern crate i8;
 use std::string as i16;
@@ -1,5 +1,4 @@
 // run-pass
-// ignore-pretty issue #37201
 
 struct X { val: i32 }
 impl std::ops::Deref for X {
@@ -1,8 +1,6 @@
 // run-pass
 #![allow(non_upper_case_globals)]
 
-// ignore-pretty issue #37201
-
 // This test is ensuring that parameters are indeed dropped after
 // temporaries in a fn body.
 
@@ -1,5 +1,4 @@
 // run-pass
-// ignore-pretty issue #37201
 
 // Check that when a `let`-binding occurs in a loop, its associated
 // drop-flag is reinitialized (to indicate "needs-drop" at the end of
src/test/ui/parser/issue-87086-colon-path-sep.rs (new file, 66 lines)
@@ -0,0 +1,66 @@
+// Tests that a suggestion is issued if the user wrote a colon instead of
+// a path separator in a match arm.
+
+enum Foo {
+    Bar,
+    Baz,
+}
+
+fn f() -> Foo { Foo::Bar }
+
+fn g1() {
+    match f() {
+        Foo:Bar => {}
+        //~^ ERROR: expected one of
+        //~| HELP: maybe write a path separator here
+        _ => {}
+    }
+    match f() {
+        Foo::Bar:Baz => {}
+        //~^ ERROR: expected one of
+        //~| HELP: maybe write a path separator here
+        _ => {}
+    }
+    match f() {
+        Foo:Bar::Baz => {}
+        //~^ ERROR: expected one of
+        //~| HELP: maybe write a path separator here
+        _ => {}
+    }
+    match f() {
+        Foo: Bar::Baz if true => {}
+        //~^ ERROR: expected one of
+        //~| HELP: maybe write a path separator here
+        _ => {}
+    }
+    if let Bar:Baz = f() {
+        //~^ ERROR: expected one of
+        //~| HELP: maybe write a path separator here
+    }
+}
+
+fn g1_neg() {
+    match f() {
+        ref Foo: Bar::Baz => {}
+        //~^ ERROR: expected one of
+        _ => {}
+    }
+}
+
+fn g2_neg() {
+    match f() {
+        mut Foo: Bar::Baz => {}
+        //~^ ERROR: expected one of
+        _ => {}
+    }
+}
+
+fn main() {
+    let myfoo = Foo::Bar;
+    match myfoo {
+        Foo::Bar => {}
+        Foo:Bar::Baz => {}
+        //~^ ERROR: expected one of
+        //~| HELP: maybe write a path separator here
+    }
+}
src/test/ui/parser/issue-87086-colon-path-sep.stderr (new file, 68 lines)
@@ -0,0 +1,68 @@
+error: expected one of `@` or `|`, found `:`
+  --> $DIR/issue-87086-colon-path-sep.rs:13:12
+   |
+LL |         Foo:Bar => {}
+   |            ^
+   |            |
+   |            expected one of `@` or `|`
+   |            help: maybe write a path separator here: `::`
+
+error: expected one of `!`, `(`, `...`, `..=`, `..`, `::`, `{`, or `|`, found `:`
+  --> $DIR/issue-87086-colon-path-sep.rs:19:17
+   |
+LL |         Foo::Bar:Baz => {}
+   |                 ^
+   |                 |
+   |                 expected one of 8 possible tokens
+   |                 help: maybe write a path separator here: `::`
+
+error: expected one of `@` or `|`, found `:`
+  --> $DIR/issue-87086-colon-path-sep.rs:25:12
+   |
+LL |         Foo:Bar::Baz => {}
+   |            ^
+   |            |
+   |            expected one of `@` or `|`
+   |            help: maybe write a path separator here: `::`
+
+error: expected one of `@` or `|`, found `:`
+  --> $DIR/issue-87086-colon-path-sep.rs:31:12
+   |
+LL |         Foo: Bar::Baz if true => {}
+   |            ^
+   |            |
+   |            expected one of `@` or `|`
+   |            help: maybe write a path separator here: `::`
+
+error: expected one of `@` or `|`, found `:`
+  --> $DIR/issue-87086-colon-path-sep.rs:36:15
+   |
+LL |     if let Bar:Baz = f() {
+   |               ^
+   |               |
+   |               expected one of `@` or `|`
+   |               help: maybe write a path separator here: `::`
+
+error: expected one of `=>`, `@`, `if`, or `|`, found `:`
+  --> $DIR/issue-87086-colon-path-sep.rs:44:16
+   |
+LL |         ref Foo: Bar::Baz => {}
+   |                ^ expected one of `=>`, `@`, `if`, or `|`
+
+error: expected one of `=>`, `@`, `if`, or `|`, found `:`
+  --> $DIR/issue-87086-colon-path-sep.rs:52:16
+   |
+LL |         mut Foo: Bar::Baz => {}
+   |                ^ expected one of `=>`, `@`, `if`, or `|`
+
+error: expected one of `@` or `|`, found `:`
+  --> $DIR/issue-87086-colon-path-sep.rs:62:12
+   |
+LL |         Foo:Bar::Baz => {}
+   |            ^
+   |            |
+   |            expected one of `@` or `|`
+   |            help: maybe write a path separator here: `::`
+
+error: aborting due to 8 previous errors