Auto merge of #121142 - GuillaumeGomez:rollup-5qmksjw, r=GuillaumeGomez
Rollup of 8 pull requests

Successful merges:

- #120449 (Document requirements for unsized {Rc,Arc}::from_raw)
- #120505 (Fix BTreeMap's Cursor::remove_{next,prev})
- #120672 (std::thread update freebsd stack guard handling.)
- #121088 (Implicitly enable evex512 if avx512 is enabled)
- #121104 (Ignore unsized types when trying to determine the size of the original type)
- #121107 (Fix msg for verbose suggestions with confusable capitalization)
- #121113 (Continue compilation even if inherent impl checks fail)
- #121120 (Add `ErrorGuaranteed` to `ast::LitKind::Err`, `token::LitKind::Err`.)

r? `@ghost`
`@rustbot` modify labels: rollup
commit cbddf31863
58 changed files with 630 additions and 277 deletions
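Most of the diff below follows one pattern, introduced by #121120: the literal error variants that used to be a bare `Err` now carry the `ErrorGuaranteed` value returned when the diagnostic was emitted, so downstream code has proof that an error was already reported. The following is a minimal, illustrative sketch of that shape only; the types and function below are simplified stand-ins, not code from this commit.

```rust
// Illustrative sketch with simplified stand-ins, not rustc's real definitions.

/// Stand-in for rustc's `ErrorGuaranteed`: a zero-sized proof that a
/// diagnostic has already been emitted.
#[derive(Clone, Copy, Debug)]
struct ErrorGuaranteed;

/// Stand-in for `ast::LitKind`, showing the shape of the change:
/// the error placeholder now carries the proof.
#[derive(Debug)]
enum LitKind {
    Bool(bool),
    Err(ErrorGuaranteed), // previously just `Err`
}

/// Stand-in for `report_lit_error`: emit the diagnostic, return the proof.
fn report_lit_error(msg: &str) -> ErrorGuaranteed {
    eprintln!("error: {msg}");
    ErrorGuaranteed
}

/// Callers now thread the returned guarantee into the error variant
/// instead of discarding it.
fn lower(parsed: Result<LitKind, String>) -> LitKind {
    match parsed {
        Ok(kind) => kind,
        Err(msg) => LitKind::Err(report_lit_error(&msg)),
    }
}

fn main() {
    println!("{:?}", lower(Ok(LitKind::Bool(true))));
    println!("{:?}", lower(Err("malformed literal".to_string())));
}
```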
@@ -1846,7 +1846,7 @@ pub enum LitKind {
     /// A boolean literal (`true`, `false`).
     Bool(bool),
     /// Placeholder for a literal that wasn't well-formed in some way.
-    Err,
+    Err(ErrorGuaranteed),
 }
 
 impl LitKind {
@@ -1893,7 +1893,7 @@ impl LitKind {
             | LitKind::Int(_, LitIntType::Unsuffixed)
             | LitKind::Float(_, LitFloatType::Unsuffixed)
             | LitKind::Bool(..)
-            | LitKind::Err => false,
+            | LitKind::Err(_) => false,
         }
     }
 }
@@ -13,7 +13,7 @@ use rustc_macros::HashStable_Generic;
 use rustc_span::symbol::{kw, sym};
 #[allow(hidden_glob_reexports)]
 use rustc_span::symbol::{Ident, Symbol};
-use rustc_span::{edition::Edition, Span, DUMMY_SP};
+use rustc_span::{edition::Edition, ErrorGuaranteed, Span, DUMMY_SP};
 use std::borrow::Cow;
 use std::fmt;
 
@@ -75,7 +75,7 @@ pub enum LitKind {
     ByteStrRaw(u8), // raw byte string delimited by `n` hash symbols
     CStr,
     CStrRaw(u8),
-    Err,
+    Err(ErrorGuaranteed),
 }
 
 /// A literal token.
@@ -144,7 +144,7 @@ impl fmt::Display for Lit {
             CStrRaw(n) => {
                 write!(f, "cr{delim}\"{symbol}\"{delim}", delim = "#".repeat(n as usize))?
             }
-            Integer | Float | Bool | Err => write!(f, "{symbol}")?,
+            Integer | Float | Bool | Err(_) => write!(f, "{symbol}")?,
         }
 
         if let Some(suffix) = suffix {
@@ -159,7 +159,7 @@ impl LitKind {
     /// An English article for the literal token kind.
     pub fn article(self) -> &'static str {
        match self {
-            Integer | Err => "an",
+            Integer | Err(_) => "an",
             _ => "a",
         }
     }
@@ -174,12 +174,12 @@ impl LitKind {
             Str | StrRaw(..) => "string",
             ByteStr | ByteStrRaw(..) => "byte string",
             CStr | CStrRaw(..) => "C string",
-            Err => "error",
+            Err(_) => "error",
         }
     }
 
     pub(crate) fn may_have_suffix(self) -> bool {
-        matches!(self, Integer | Float | Err)
+        matches!(self, Integer | Float | Err(_))
     }
 }
 
@@ -31,20 +31,21 @@ pub fn escape_byte_str_symbol(bytes: &[u8]) -> Symbol {
 
 #[derive(Debug)]
 pub enum LitError {
-    LexerError,
-    InvalidSuffix,
-    InvalidIntSuffix,
-    InvalidFloatSuffix,
-    NonDecimalFloat(u32),
-    IntTooLarge(u32),
+    InvalidSuffix(Symbol),
+    InvalidIntSuffix(Symbol),
+    InvalidFloatSuffix(Symbol),
+    NonDecimalFloat(u32), // u32 is the base
+    IntTooLarge(u32), // u32 is the base
 }
 
 impl LitKind {
     /// Converts literal token into a semantic literal.
     pub fn from_token_lit(lit: token::Lit) -> Result<LitKind, LitError> {
         let token::Lit { kind, symbol, suffix } = lit;
-        if suffix.is_some() && !kind.may_have_suffix() {
-            return Err(LitError::InvalidSuffix);
+        if let Some(suffix) = suffix
+            && !kind.may_have_suffix()
+        {
+            return Err(LitError::InvalidSuffix(suffix));
         }
 
         // For byte/char/string literals, chars and escapes have already been
@@ -145,7 +146,7 @@ impl LitKind {
                 buf.push(0);
                 LitKind::CStr(buf.into(), StrStyle::Raw(n))
             }
-            token::Err => LitKind::Err,
+            token::Err(guar) => LitKind::Err(guar),
         })
     }
 }
@@ -202,7 +203,7 @@ impl fmt::Display for LitKind {
                 }
             }
             LitKind::Bool(b) => write!(f, "{}", if b { "true" } else { "false" })?,
-            LitKind::Err => {
+            LitKind::Err(_) => {
                 // This only shows up in places like `-Zunpretty=hir` output, so we
                 // don't bother to produce something useful.
                 write!(f, "<bad-literal>")?;
@@ -238,7 +239,7 @@ impl MetaItemLit {
             LitKind::Char(_) => token::Char,
             LitKind::Int(..) => token::Integer,
             LitKind::Float(..) => token::Float,
-            LitKind::Err => token::Err,
+            LitKind::Err(guar) => token::Err(guar),
         };
 
         token::Lit::new(kind, self.symbol, self.suffix)
@@ -272,12 +273,12 @@ fn filtered_float_lit(
         return Err(LitError::NonDecimalFloat(base));
     }
     Ok(match suffix {
-        Some(suf) => LitKind::Float(
+        Some(suffix) => LitKind::Float(
             symbol,
-            ast::LitFloatType::Suffixed(match suf {
+            ast::LitFloatType::Suffixed(match suffix {
                 sym::f32 => ast::FloatTy::F32,
                 sym::f64 => ast::FloatTy::F64,
-                _ => return Err(LitError::InvalidFloatSuffix),
+                _ => return Err(LitError::InvalidFloatSuffix(suffix)),
             }),
         ),
         None => LitKind::Float(symbol, ast::LitFloatType::Unsuffixed),
@@ -318,17 +319,13 @@ fn integer_lit(symbol: Symbol, suffix: Option<Symbol>) -> Result<LitKind, LitErr
             // `1f64` and `2f32` etc. are valid float literals, and
             // `fxxx` looks more like an invalid float literal than invalid integer literal.
             _ if suf.as_str().starts_with('f') => return filtered_float_lit(symbol, suffix, base),
-            _ => return Err(LitError::InvalidIntSuffix),
+            _ => return Err(LitError::InvalidIntSuffix(suf)),
         },
         _ => ast::LitIntType::Unsuffixed,
     };
 
     let s = &s[if base != 10 { 2 } else { 0 }..];
-    u128::from_str_radix(s, base).map(|i| LitKind::Int(i.into(), ty)).map_err(|_| {
-        // Small bases are lexed as if they were base 10, e.g, the string
-        // might be `0b10201`. This will cause the conversion above to fail,
-        // but these kinds of errors are already reported by the lexer.
-        let from_lexer = base < 10 && s.chars().any(|c| c.to_digit(10).is_some_and(|d| d >= base));
-        if from_lexer { LitError::LexerError } else { LitError::IntTooLarge(base) }
-    })
+    u128::from_str_radix(s, base)
+        .map(|i| LitKind::Int(i.into(), ty))
+        .map_err(|_| LitError::IntTooLarge(base))
 }
@@ -124,8 +124,13 @@ impl<'hir> LoweringContext<'_, 'hir> {
                 let lit_kind = match LitKind::from_token_lit(*token_lit) {
                     Ok(lit_kind) => lit_kind,
                     Err(err) => {
-                        report_lit_error(&self.tcx.sess.parse_sess, err, *token_lit, e.span);
-                        LitKind::Err
+                        let guar = report_lit_error(
+                            &self.tcx.sess.parse_sess,
+                            err,
+                            *token_lit,
+                            e.span,
+                        );
+                        LitKind::Err(guar)
                     }
                 };
                 let lit = self.arena.alloc(respan(self.lower_span(e.span), lit_kind));
@@ -966,10 +966,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
             {
                 lit
             } else {
+                let guar = self.dcx().has_errors().unwrap();
                 MetaItemLit {
                     symbol: kw::Empty,
                     suffix: None,
-                    kind: LitKind::Err,
+                    kind: LitKind::Err(guar),
                     span: DUMMY_SP,
                 }
             };
@@ -254,7 +254,7 @@ fn literal_to_string(lit: token::Lit) -> String {
         token::CStrRaw(n) => {
             format!("cr{delim}\"{symbol}\"{delim}", delim = "#".repeat(n as usize))
         }
-        token::Integer | token::Float | token::Bool | token::Err => symbol.to_string(),
+        token::Integer | token::Float | token::Bool | token::Err(_) => symbol.to_string(),
     };
 
     if let Some(suffix) = suffix {
@@ -40,7 +40,7 @@ pub fn expand_concat(
                 cx.dcx().emit_err(errors::ConcatBytestr { span: e.span });
                 has_errors = true;
             }
-            Ok(ast::LitKind::Err) => {
+            Ok(ast::LitKind::Err(_)) => {
                 has_errors = true;
             }
             Err(err) => {
@@ -44,7 +44,7 @@ fn invalid_type_err(
         Ok(ast::LitKind::Bool(_)) => {
             dcx.emit_err(ConcatBytesInvalid { span, lit_kind: "boolean", sugg: None });
         }
-        Ok(ast::LitKind::Err) => {}
+        Ok(ast::LitKind::Err(_)) => {}
        Ok(ast::LitKind::Int(_, _)) if !is_nested => {
             let sugg =
                 snippet.map(|snippet| ConcatBytesInvalidSuggestion::IntLit { span: span, snippet });
@@ -266,6 +266,10 @@ pub fn to_llvm_features<'a>(sess: &Session, s: &'a str) -> LLVMFeature<'a> {
         ("riscv32" | "riscv64", "fast-unaligned-access") if get_version().0 <= 17 => {
             LLVMFeature::new("unaligned-scalar-mem")
         }
+        // For LLVM 18, enable the evex512 target feature if a avx512 target feature is enabled.
+        ("x86", s) if get_version().0 >= 18 && s.starts_with("avx512") => {
+            LLVMFeature::with_dependency(s, TargetFeatureFoldStrength::EnableOnly("evex512"))
+        }
         (_, s) => LLVMFeature::new(s),
     }
 }
@@ -1743,9 +1743,17 @@ impl HumanEmitter {
         buffer.append(0, level.to_str(), Style::Level(*level));
         buffer.append(0, ": ", Style::HeaderMsg);
 
+        let mut msg = vec![(suggestion.msg.to_owned(), Style::NoStyle)];
+        if suggestions
+            .iter()
+            .take(MAX_SUGGESTIONS)
+            .any(|(_, _, _, only_capitalization)| *only_capitalization)
+        {
+            msg.push((" (notice the capitalization difference)".into(), Style::NoStyle));
+        }
         self.msgs_to_buffer(
             &mut buffer,
-            &[(suggestion.msg.to_owned(), Style::NoStyle)],
+            &msg,
             args,
             max_line_num_len,
             "suggestion",
@@ -1754,12 +1762,8 @@ impl HumanEmitter {
 
         let mut row_num = 2;
         draw_col_separator_no_space(&mut buffer, 1, max_line_num_len + 1);
-        let mut notice_capitalization = false;
-        for (complete, parts, highlights, only_capitalization) in
-            suggestions.iter().take(MAX_SUGGESTIONS)
-        {
+        for (complete, parts, highlights, _) in suggestions.iter().take(MAX_SUGGESTIONS) {
             debug!(?complete, ?parts, ?highlights);
-            notice_capitalization |= only_capitalization;
 
             let has_deletion = parts.iter().any(|p| p.is_deletion(sm));
             let is_multiline = complete.lines().count() > 1;
@@ -2058,9 +2062,6 @@ impl HumanEmitter {
             let others = suggestions.len() - MAX_SUGGESTIONS;
             let msg = format!("and {} other candidate{}", others, pluralize!(others));
             buffer.puts(row_num, max_line_num_len + 3, &msg, Style::NoStyle);
-        } else if notice_capitalization {
-            let msg = "notice the capitalization difference";
-            buffer.puts(row_num, max_line_num_len + 3, msg, Style::NoStyle);
         }
         emit_to_destination(&buffer.render(), level, &mut self.dst, self.short_message)?;
         Ok(())
@@ -320,7 +320,9 @@ impl CodeSuggestion {
                 // We need to keep track of the difference between the existing code and the added
                 // or deleted code in order to point at the correct column *after* substitution.
                 let mut acc = 0;
+                let mut only_capitalization = false;
                 for part in &substitution.parts {
+                    only_capitalization |= is_case_difference(sm, &part.snippet, part.span);
                     let cur_lo = sm.lookup_char_pos(part.span.lo());
                     if prev_hi.line == cur_lo.line {
                         let mut count =
@@ -393,7 +395,6 @@ impl CodeSuggestion {
                     }
                 }
                 highlights.push(std::mem::take(&mut line_highlight));
-                let only_capitalization = is_case_difference(sm, &buf, bounding_span);
                 // if the replacement already ends with a newline, don't print the next line
                 if !buf.ends_with('\n') {
                     push_trailing(&mut buf, prev_line.as_ref(), &prev_hi, None);
@@ -1266,7 +1266,7 @@ pub fn expr_to_spanned_string<'a>(
                 );
                 Some((err, true))
             }
-            Ok(ast::LitKind::Err) => None,
+            Ok(ast::LitKind::Err(_)) => None,
             Err(err) => {
                 report_lit_error(&cx.sess.parse_sess, err, token_lit, expr.span);
                 None
@@ -10,7 +10,7 @@ use rustc_ast::util::literal::escape_byte_str_symbol;
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::Lrc;
-use rustc_errors::{MultiSpan, PResult};
+use rustc_errors::{ErrorGuaranteed, MultiSpan, PResult};
 use rustc_parse::lexer::nfc_normalize;
 use rustc_parse::parse_stream_from_source_str;
 use rustc_session::parse::ParseSess;
@@ -63,7 +63,12 @@ impl FromInternal<token::LitKind> for LitKind {
             token::ByteStrRaw(n) => LitKind::ByteStrRaw(n),
             token::CStr => LitKind::CStr,
             token::CStrRaw(n) => LitKind::CStrRaw(n),
-            token::Err => LitKind::Err,
+            token::Err(_guar) => {
+                // This is the only place a `pm::bridge::LitKind::ErrWithGuar`
+                // is constructed. Note that an `ErrorGuaranteed` is available,
+                // as required. See the comment in `to_internal`.
+                LitKind::ErrWithGuar
+            }
             token::Bool => unreachable!(),
         }
     }
@@ -82,7 +87,16 @@ impl ToInternal<token::LitKind> for LitKind {
             LitKind::ByteStrRaw(n) => token::ByteStrRaw(n),
             LitKind::CStr => token::CStr,
             LitKind::CStrRaw(n) => token::CStrRaw(n),
-            LitKind::Err => token::Err,
+            LitKind::ErrWithGuar => {
+                // This is annoying but valid. `LitKind::ErrWithGuar` would
+                // have an `ErrorGuaranteed` except that type isn't available
+                // in that crate. So we have to fake one. And we don't want to
+                // use a delayed bug because there might be lots of these,
+                // which would be expensive.
+                #[allow(deprecated)]
+                let guar = ErrorGuaranteed::unchecked_error_guaranteed();
+                token::Err(guar)
+            }
         }
     }
 }
@@ -477,7 +491,7 @@ impl server::FreeFunctions for Rustc<'_, '_> {
             | token::LitKind::ByteStrRaw(_)
             | token::LitKind::CStr
             | token::LitKind::CStrRaw(_)
-            | token::LitKind::Err => return Err(()),
+            | token::LitKind::Err(_) => return Err(()),
             token::LitKind::Integer | token::LitKind::Float => {}
         }
 
@@ -178,8 +178,9 @@ pub fn check_crate(tcx: TyCtxt<'_>) -> Result<(), ErrorGuaranteed> {
             let _ = tcx.ensure().coherent_trait(trait_def_id);
         }
         // these queries are executed for side-effects (error reporting):
-        res.and(tcx.ensure().crate_inherent_impls(()))
-            .and(tcx.ensure().crate_inherent_impls_overlap_check(()))
+        let _ = tcx.ensure().crate_inherent_impls(());
+        let _ = tcx.ensure().crate_inherent_impls_overlap_check(());
+        res
     })?;
 
     if tcx.features().rustc_attrs {
@@ -1319,7 +1319,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 tcx.type_of(tcx.require_lang_item(hir::LangItem::CStr, Some(lit.span)))
                     .skip_binder(),
             ),
-            ast::LitKind::Err => Ty::new_misc_error(tcx),
+            ast::LitKind::Err(guar) => Ty::new_error(tcx, guar),
         }
     }
 
@@ -207,6 +207,13 @@ fn is_cast_to_bigger_memory_layout<'tcx>(
     }
 
     let from_layout = cx.layout_of(*inner_start_ty).ok()?;
+
+    // if the type isn't sized, we bail out, instead of potentially giving
+    // the user a meaningless warning.
+    if from_layout.is_unsized() {
+        return None;
+    }
+
     let alloc_layout = cx.layout_of(alloc_ty).ok()?;
     let to_layout = cx.layout_of(*inner_end_ty).ok()?;
 
@@ -164,11 +164,7 @@ fn lit_to_mir_constant<'tcx>(
         })?,
         (ast::LitKind::Bool(b), ty::Bool) => ConstValue::Scalar(Scalar::from_bool(*b)),
         (ast::LitKind::Char(c), ty::Char) => ConstValue::Scalar(Scalar::from_char(*c)),
-        (ast::LitKind::Err, _) => {
-            return Err(LitToConstError::Reported(
-                tcx.dcx().delayed_bug("encountered LitKind::Err during mir build"),
-            ));
-        }
+        (ast::LitKind::Err(guar), _) => return Err(LitToConstError::Reported(*guar)),
         _ => return Err(LitToConstError::TypeError),
     };
 
@@ -71,11 +71,7 @@ pub(crate) fn lit_to_const<'tcx>(
             ty::ValTree::from_scalar_int(bits)
         }
         (ast::LitKind::Char(c), ty::Char) => ty::ValTree::from_scalar_int((*c).into()),
-        (ast::LitKind::Err, _) => {
-            return Err(LitToConstError::Reported(
-                tcx.dcx().delayed_bug("encountered LitKind::Err during mir build"),
-            ));
-        }
+        (ast::LitKind::Err(guar), _) => return Err(LitToConstError::Reported(*guar)),
         _ => return Err(LitToConstError::TypeError),
     };
 
@@ -897,12 +897,14 @@ impl<'tcx> Cx<'tcx> {
             let hir_id = self.tcx.local_def_id_to_hir_id(def_id.expect_local());
             let generics = self.tcx.generics_of(hir_id.owner);
             let Some(&index) = generics.param_def_id_to_index.get(&def_id) else {
-                self.tcx.dcx().has_errors().unwrap();
+                let guar = self.tcx.dcx().has_errors().unwrap();
                 // We already errored about a late bound const
-                return ExprKind::Literal {
-                    lit: &Spanned { span: DUMMY_SP, node: LitKind::Err },
-                    neg: false,
-                };
+
+                let lit = self
+                    .tcx
+                    .hir_arena
+                    .alloc(Spanned { span: DUMMY_SP, node: LitKind::Err(guar) });
+                return ExprKind::Literal { lit, neg: false };
             };
             let name = self.tcx.hir().name(hir_id);
             let param = ty::ParamConst::new(index, name);
@@ -16,7 +16,7 @@ use rustc_session::lint::builtin::{
 };
 use rustc_session::lint::BuiltinLintDiagnostics;
 use rustc_session::parse::ParseSess;
-use rustc_span::symbol::{sym, Symbol};
+use rustc_span::symbol::Symbol;
 use rustc_span::{edition::Edition, BytePos, Pos, Span};
 
 mod diagnostics;
@@ -478,26 +478,27 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
                 }
             }
             rustc_lexer::LiteralKind::Int { base, empty_int } => {
+                let mut kind = token::Integer;
                 if empty_int {
                     let span = self.mk_sp(start, end);
-                    self.dcx().emit_err(errors::NoDigitsLiteral { span });
-                    (token::Integer, sym::integer(0))
-                } else {
-                    if matches!(base, Base::Binary | Base::Octal) {
-                        let base = base as u32;
-                        let s = self.str_from_to(start + BytePos(2), end);
-                        for (idx, c) in s.char_indices() {
-                            let span = self.mk_sp(
-                                start + BytePos::from_usize(2 + idx),
-                                start + BytePos::from_usize(2 + idx + c.len_utf8()),
-                            );
-                            if c != '_' && c.to_digit(base).is_none() {
+                    let guar = self.dcx().emit_err(errors::NoDigitsLiteral { span });
+                    kind = token::Err(guar);
+                } else if matches!(base, Base::Binary | Base::Octal) {
+                    let base = base as u32;
+                    let s = self.str_from_to(start + BytePos(2), end);
+                    for (idx, c) in s.char_indices() {
+                        let span = self.mk_sp(
+                            start + BytePos::from_usize(2 + idx),
+                            start + BytePos::from_usize(2 + idx + c.len_utf8()),
+                        );
+                        if c != '_' && c.to_digit(base).is_none() {
+                            let guar =
                                 self.dcx().emit_err(errors::InvalidDigitLiteral { span, base });
-                            }
+                            kind = token::Err(guar);
                         }
                     }
-                    (token::Integer, self.symbol_from_to(start, end))
                 }
+                (kind, self.symbol_from_to(start, end))
             }
             rustc_lexer::LiteralKind::Float { base, empty_exponent } => {
                 if empty_exponent {
@@ -691,7 +692,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
 
     fn cook_common(
         &self,
-        kind: token::LitKind,
+        mut kind: token::LitKind,
         mode: Mode,
         start: BytePos,
         end: BytePos,
@@ -699,7 +700,6 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
         postfix_len: u32,
         unescape: fn(&str, Mode, &mut dyn FnMut(Range<usize>, Result<(), EscapeError>)),
     ) -> (token::LitKind, Symbol) {
-        let mut has_fatal_err = false;
         let content_start = start + BytePos(prefix_len);
         let content_end = end - BytePos(postfix_len);
         let lit_content = self.str_from_to(content_start, content_end);
@@ -711,10 +711,8 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
                 let lo = content_start + BytePos(start);
                 let hi = lo + BytePos(end - start);
                 let span = self.mk_sp(lo, hi);
-                if err.is_fatal() {
-                    has_fatal_err = true;
-                }
-                emit_unescape_error(
+                let is_fatal = err.is_fatal();
+                if let Some(guar) = emit_unescape_error(
                     self.dcx(),
                     lit_content,
                     span_with_quotes,
@@ -722,17 +720,21 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
                     mode,
                     range,
                     err,
-                );
+                ) {
+                    assert!(is_fatal);
+                    kind = token::Err(guar);
+                }
             }
         });
 
         // We normally exclude the quotes for the symbol, but for errors we
         // include it because it results in clearer error messages.
-        if !has_fatal_err {
-            (kind, Symbol::intern(lit_content))
+        let sym = if !matches!(kind, token::Err(_)) {
+            Symbol::intern(lit_content)
         } else {
-            (token::Err, self.symbol_from_to(start, end))
-        }
+            self.symbol_from_to(start, end)
+        };
+        (kind, sym)
     }
 
     fn cook_unicode(
@@ -3,7 +3,7 @@
 use std::iter::once;
 use std::ops::Range;
 
-use rustc_errors::{Applicability, DiagCtxt};
+use rustc_errors::{Applicability, DiagCtxt, ErrorGuaranteed};
 use rustc_lexer::unescape::{EscapeError, Mode};
 use rustc_span::{BytePos, Span};
 
@@ -21,7 +21,7 @@ pub(crate) fn emit_unescape_error(
     // range of the error inside `lit`
     range: Range<usize>,
     error: EscapeError,
-) {
+) -> Option<ErrorGuaranteed> {
     debug!(
         "emit_unescape_error: {:?}, {:?}, {:?}, {:?}, {:?}",
         lit, full_lit_span, mode, range, error
@@ -31,12 +31,12 @@ pub(crate) fn emit_unescape_error(
         let span = err_span.with_lo(err_span.hi() - BytePos(c.len_utf8() as u32));
         (c, span)
     };
-    match error {
+    Some(match error {
         EscapeError::LoneSurrogateUnicodeEscape => {
-            dcx.emit_err(UnescapeError::InvalidUnicodeEscape { span: err_span, surrogate: true });
+            dcx.emit_err(UnescapeError::InvalidUnicodeEscape { span: err_span, surrogate: true })
         }
         EscapeError::OutOfRangeUnicodeEscape => {
-            dcx.emit_err(UnescapeError::InvalidUnicodeEscape { span: err_span, surrogate: false });
+            dcx.emit_err(UnescapeError::InvalidUnicodeEscape { span: err_span, surrogate: false })
         }
         EscapeError::MoreThanOneChar => {
             use unicode_normalization::{char::is_combining_mark, UnicodeNormalization};
@@ -106,7 +106,7 @@ pub(crate) fn emit_unescape_error(
                 span: full_lit_span,
                 note,
                 suggestion: sugg,
-            });
+            })
         }
         EscapeError::EscapeOnlyChar => {
             let (c, char_span) = last_char();
@@ -116,15 +116,15 @@ pub(crate) fn emit_unescape_error(
                 escaped_sugg: c.escape_default().to_string(),
                 escaped_msg: escaped_char(c),
                 byte: mode == Mode::Byte,
-            });
+            })
         }
         EscapeError::BareCarriageReturn => {
             let double_quotes = mode.in_double_quotes();
-            dcx.emit_err(UnescapeError::BareCr { span: err_span, double_quotes });
+            dcx.emit_err(UnescapeError::BareCr { span: err_span, double_quotes })
         }
         EscapeError::BareCarriageReturnInRawString => {
             assert!(mode.in_double_quotes());
-            dcx.emit_err(UnescapeError::BareCrRawString(err_span));
+            dcx.emit_err(UnescapeError::BareCrRawString(err_span))
         }
         EscapeError::InvalidEscape => {
             let (c, span) = last_char();
@@ -161,16 +161,14 @@ pub(crate) fn emit_unescape_error(
                     <https://doc.rust-lang.org/reference/tokens.html#literals>",
                 );
             }
-            diag.emit();
-        }
-        EscapeError::TooShortHexEscape => {
-            dcx.emit_err(UnescapeError::TooShortHexEscape(err_span));
+            diag.emit()
         }
+        EscapeError::TooShortHexEscape => dcx.emit_err(UnescapeError::TooShortHexEscape(err_span)),
         EscapeError::InvalidCharInHexEscape | EscapeError::InvalidCharInUnicodeEscape => {
             let (c, span) = last_char();
             let is_hex = error == EscapeError::InvalidCharInHexEscape;
             let ch = escaped_char(c);
-            dcx.emit_err(UnescapeError::InvalidCharInEscape { span, is_hex, ch });
+            dcx.emit_err(UnescapeError::InvalidCharInEscape { span, is_hex, ch })
         }
         EscapeError::NonAsciiCharInByte => {
             let (c, span) = last_char();
@@ -213,23 +211,23 @@ pub(crate) fn emit_unescape_error(
                     Applicability::MaybeIncorrect,
                 );
             }
-            err.emit();
+            err.emit()
         }
         EscapeError::OutOfRangeHexEscape => {
-            dcx.emit_err(UnescapeError::OutOfRangeHexEscape(err_span));
+            dcx.emit_err(UnescapeError::OutOfRangeHexEscape(err_span))
        }
         EscapeError::LeadingUnderscoreUnicodeEscape => {
             let (c, span) = last_char();
             dcx.emit_err(UnescapeError::LeadingUnderscoreUnicodeEscape {
                 span,
                 ch: escaped_char(c),
-            });
+            })
         }
         EscapeError::OverlongUnicodeEscape => {
-            dcx.emit_err(UnescapeError::OverlongUnicodeEscape(err_span));
+            dcx.emit_err(UnescapeError::OverlongUnicodeEscape(err_span))
         }
         EscapeError::UnclosedUnicodeEscape => {
-            dcx.emit_err(UnescapeError::UnclosedUnicodeEscape(err_span, err_span.shrink_to_hi()));
+            dcx.emit_err(UnescapeError::UnclosedUnicodeEscape(err_span, err_span.shrink_to_hi()))
         }
         EscapeError::NoBraceInUnicodeEscape => {
             let mut suggestion = "\\u{".to_owned();
@@ -248,23 +246,17 @@ pub(crate) fn emit_unescape_error(
             } else {
                 (Some(err_span), NoBraceUnicodeSub::Help)
             };
-            dcx.emit_err(UnescapeError::NoBraceInUnicodeEscape { span: err_span, label, sub });
+            dcx.emit_err(UnescapeError::NoBraceInUnicodeEscape { span: err_span, label, sub })
         }
         EscapeError::UnicodeEscapeInByte => {
-            dcx.emit_err(UnescapeError::UnicodeEscapeInByte(err_span));
+            dcx.emit_err(UnescapeError::UnicodeEscapeInByte(err_span))
         }
         EscapeError::EmptyUnicodeEscape => {
-            dcx.emit_err(UnescapeError::EmptyUnicodeEscape(err_span));
-        }
-        EscapeError::ZeroChars => {
-            dcx.emit_err(UnescapeError::ZeroChars(err_span));
-        }
-        EscapeError::LoneSlash => {
-            dcx.emit_err(UnescapeError::LoneSlash(err_span));
-        }
-        EscapeError::NulInCStr => {
-            dcx.emit_err(UnescapeError::NulInCStr { span: err_span });
+            dcx.emit_err(UnescapeError::EmptyUnicodeEscape(err_span))
         }
+        EscapeError::ZeroChars => dcx.emit_err(UnescapeError::ZeroChars(err_span)),
+        EscapeError::LoneSlash => dcx.emit_err(UnescapeError::LoneSlash(err_span)),
+        EscapeError::NulInCStr => dcx.emit_err(UnescapeError::NulInCStr { span: err_span }),
         EscapeError::UnskippedWhitespaceWarning => {
             let (c, char_span) = last_char();
             dcx.emit_warn(UnescapeError::UnskippedWhitespace {
@@ -272,11 +264,13 @@ pub(crate) fn emit_unescape_error(
                 ch: escaped_char(c),
                 char_span,
             });
+            return None;
         }
         EscapeError::MultipleSkippedLinesWarning => {
             dcx.emit_warn(UnescapeError::MultipleSkippedLinesWarning(err_span));
+            return None;
         }
-    }
+    })
 }
 
 /// Pushes a character to a message string for error reporting
@@ -2140,12 +2140,12 @@ impl<'a> Parser<'a> {
             Err(err) => {
                 let span = token.uninterpolated_span();
                 self.bump();
-                report_lit_error(self.sess, err, lit, span);
+                let guar = report_lit_error(self.sess, err, lit, span);
                 // Pack possible quotes and prefixes from the original literal into
                 // the error literal's symbol so they can be pretty-printed faithfully.
                 let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
                 let symbol = Symbol::intern(&suffixless_lit.to_string());
-                let lit = token::Lit::new(token::Err, symbol, lit.suffix);
+                let lit = token::Lit::new(token::Err(guar), symbol, lit.suffix);
                 Some(
                     MetaItemLit::from_token_lit(lit, span)
                         .unwrap_or_else(|_| unreachable!()),
@@ -1459,7 +1459,7 @@ impl<'a> Parser<'a> {
         match self.parse_str_lit() {
             Ok(str_lit) => Some(str_lit),
             Err(Some(lit)) => match lit.kind {
-                ast::LitKind::Err => None,
+                ast::LitKind::Err(_) => None,
                 _ => {
                     self.dcx().emit_err(NonStringAbiLiteral { span: lit.span });
                     None
@@ -70,11 +70,11 @@ pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Meta
                     }
                 }
                 Err(err) => {
-                    report_lit_error(sess, err, token_lit, expr.span);
+                    let guar = report_lit_error(sess, err, token_lit, expr.span);
                     let lit = ast::MetaItemLit {
                         symbol: token_lit.symbol,
                         suffix: token_lit.suffix,
-                        kind: ast::LitKind::Err,
+                        kind: ast::LitKind::Err(guar),
                         span: expr.span,
                     };
                     MetaItemKind::NameValue(lit)
@@ -3,7 +3,8 @@ use std::num::NonZeroU32;
 use rustc_ast::token;
 use rustc_ast::util::literal::LitError;
 use rustc_errors::{
-    codes::*, DiagCtxt, DiagnosticBuilder, DiagnosticMessage, IntoDiagnostic, Level, MultiSpan,
+    codes::*, DiagCtxt, DiagnosticBuilder, DiagnosticMessage, ErrorGuaranteed, IntoDiagnostic,
+    Level, MultiSpan,
 };
 use rustc_macros::Diagnostic;
 use rustc_span::{Span, Symbol};
@@ -344,7 +345,12 @@ pub(crate) struct BinaryFloatLiteralNotSupported {
     pub span: Span,
 }
 
-pub fn report_lit_error(sess: &ParseSess, err: LitError, lit: token::Lit, span: Span) {
+pub fn report_lit_error(
+    sess: &ParseSess,
+    err: LitError,
+    lit: token::Lit,
+    span: Span,
+) -> ErrorGuaranteed {
     // Checks if `s` looks like i32 or u1234 etc.
     fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
         s.len() > 1 && s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
@@ -372,47 +378,37 @@ pub fn report_lit_error(sess: &ParseSess, err: LitError, lit: token::Lit, span:
         valid.then(|| format!("0{}{}", base_char.to_ascii_lowercase(), &suffix[1..]))
     }
 
-    let token::Lit { kind, symbol, suffix, .. } = lit;
     let dcx = &sess.dcx;
     match err {
-        // `LexerError` is an error, but it was already reported
-        // by lexer, so here we don't report it the second time.
-        LitError::LexerError => {}
-        LitError::InvalidSuffix => {
-            if let Some(suffix) = suffix {
-                dcx.emit_err(InvalidLiteralSuffix { span, kind: kind.descr(), suffix });
-            }
+        LitError::InvalidSuffix(suffix) => {
+            dcx.emit_err(InvalidLiteralSuffix { span, kind: lit.kind.descr(), suffix })
         }
-        LitError::InvalidIntSuffix => {
-            let suf = suffix.expect("suffix error with no suffix");
-            let suf = suf.as_str();
+        LitError::InvalidIntSuffix(suffix) => {
+            let suf = suffix.as_str();
             if looks_like_width_suffix(&['i', 'u'], suf) {
                 // If it looks like a width, try to be helpful.
-                dcx.emit_err(InvalidIntLiteralWidth { span, width: suf[1..].into() });
-            } else if let Some(fixed) = fix_base_capitalisation(symbol.as_str(), suf) {
-                dcx.emit_err(InvalidNumLiteralBasePrefix { span, fixed });
+                dcx.emit_err(InvalidIntLiteralWidth { span, width: suf[1..].into() })
+            } else if let Some(fixed) = fix_base_capitalisation(lit.symbol.as_str(), suf) {
+                dcx.emit_err(InvalidNumLiteralBasePrefix { span, fixed })
             } else {
-                dcx.emit_err(InvalidNumLiteralSuffix { span, suffix: suf.to_string() });
+                dcx.emit_err(InvalidNumLiteralSuffix { span, suffix: suf.to_string() })
             }
         }
-        LitError::InvalidFloatSuffix => {
-            let suf = suffix.expect("suffix error with no suffix");
-            let suf = suf.as_str();
+        LitError::InvalidFloatSuffix(suffix) => {
+            let suf = suffix.as_str();
            if looks_like_width_suffix(&['f'], suf) {
                 // If it looks like a width, try to be helpful.
-                dcx.emit_err(InvalidFloatLiteralWidth { span, width: suf[1..].to_string() });
+                dcx.emit_err(InvalidFloatLiteralWidth { span, width: suf[1..].to_string() })
             } else {
-                dcx.emit_err(InvalidFloatLiteralSuffix { span, suffix: suf.to_string() });
+                dcx.emit_err(InvalidFloatLiteralSuffix { span, suffix: suf.to_string() })
             }
         }
-        LitError::NonDecimalFloat(base) => {
-            match base {
-                16 => dcx.emit_err(HexadecimalFloatLiteralNotSupported { span }),
-                8 => dcx.emit_err(OctalFloatLiteralNotSupported { span }),
-                2 => dcx.emit_err(BinaryFloatLiteralNotSupported { span }),
-                _ => unreachable!(),
-            };
-        }
+        LitError::NonDecimalFloat(base) => match base {
+            16 => dcx.emit_err(HexadecimalFloatLiteralNotSupported { span }),
+            8 => dcx.emit_err(OctalFloatLiteralNotSupported { span }),
+            2 => dcx.emit_err(BinaryFloatLiteralNotSupported { span }),
+            _ => unreachable!(),
+        },
         LitError::IntTooLarge(base) => {
             let max = u128::MAX;
             let limit = match base {
@@ -421,7 +417,7 @@ pub fn report_lit_error(sess: &ParseSess, err: LitError, lit: token::Lit, span:
             16 => format!("{max:#x}"),
             _ => format!("{max}"),
         };
-        dcx.emit_err(IntLiteralTooLarge { span, limit });
+        dcx.emit_err(IntLiteralTooLarge { span, limit })
         }
     }
 }
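A second pattern that repeats above (for example in `emit_unescape_error` and `report_lit_error`) is turning statement-style `emit_err(...);` calls into tail expressions so the surrounding match evaluates to the emitted guarantee, letting the function return it. The following is a minimal sketch of that shape only, again with simplified stand-in types and functions rather than rustc's real API.

```rust
// Illustrative sketch with simplified stand-ins, not rustc's real API.

/// Stand-in for `ErrorGuaranteed`.
#[derive(Clone, Copy, Debug)]
struct ErrorGuaranteed;

/// Stand-in for the lexer's escape errors; one variant is only a warning.
enum EscapeError {
    BareCarriageReturn,
    LoneSlash,
    MultipleSkippedLinesWarning,
}

/// Stand-in for a diagnostic emitter that returns the proof of emission.
fn emit_err(msg: &str) -> ErrorGuaranteed {
    eprintln!("error: {msg}");
    ErrorGuaranteed
}

/// Before the change, every arm ended in `emit_err(...);` and the function
/// returned `()`. After it, each error arm is a tail expression wrapped in
/// `Some(...)`, and warning arms return `None` explicitly.
fn emit_unescape_error(error: EscapeError) -> Option<ErrorGuaranteed> {
    Some(match error {
        EscapeError::BareCarriageReturn => emit_err("bare CR not allowed in string literal"),
        EscapeError::LoneSlash => emit_err("invalid trailing slash in literal"),
        EscapeError::MultipleSkippedLinesWarning => {
            eprintln!("warning: multiple lines skipped by escaped newline");
            return None;
        }
    })
}

fn main() {
    assert!(emit_unescape_error(EscapeError::LoneSlash).is_some());
    assert!(emit_unescape_error(EscapeError::MultipleSkippedLinesWarning).is_none());
}
```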