
Fix uninlined_format_args for some compiler crates

Convert all the crates that have had their diagnostic migration
completed (except save_analysis because that will be deleted soon and
apfloat because of the licensing problem).
nils 2022-12-19 10:31:55 +01:00 committed by Nilstrieb
parent 1d284af117
commit fd7a159710
91 changed files with 287 additions and 329 deletions
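
For context: `uninlined_format_args` is a Clippy lint that suggests capturing plain identifiers directly inside format strings (format argument capture, available since Rust 1.58) instead of passing them as trailing positional arguments. A minimal sketch of the kind of rewrite this commit applies, with illustrative variable names not taken from the diff:

fn main() {
    let operand_idx = 3;
    let modifier = "x";

    // Before: positional `{}` placeholders with separate arguments.
    let old = format!("{{{}:{}}}", operand_idx, modifier);

    // After: the identifiers are captured directly in the format string.
    // Only plain identifiers can be captured; expressions such as `ty.name()`
    // must stay as explicit arguments, which is why some calls in this diff
    // keep the positional form.
    let new = format!("{{{operand_idx}:{modifier}}}");

    assert_eq!(old, new);
    assert_eq!(new, "{3:x}");
}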

View file

@@ -2170,10 +2170,10 @@ impl fmt::Display for InlineAsmTemplatePiece {
                 Ok(())
             }
             Self::Placeholder { operand_idx, modifier: Some(modifier), .. } => {
-                write!(f, "{{{}:{}}}", operand_idx, modifier)
+                write!(f, "{{{operand_idx}:{modifier}}}")
             }
             Self::Placeholder { operand_idx, modifier: None, .. } => {
-                write!(f, "{{{}}}", operand_idx)
+                write!(f, "{{{operand_idx}}}")
             }
         }
     }
@@ -2185,7 +2185,7 @@ impl InlineAsmTemplatePiece {
         use fmt::Write;
         let mut out = String::new();
         for p in s.iter() {
-            let _ = write!(out, "{}", p);
+            let _ = write!(out, "{p}");
         }
         out
     }

View file

@@ -214,7 +214,7 @@ impl HasTokens for Attribute {
         match &self.kind {
             AttrKind::Normal(normal) => normal.tokens.as_ref(),
             kind @ AttrKind::DocComment(..) => {
-                panic!("Called tokens on doc comment attr {:?}", kind)
+                panic!("Called tokens on doc comment attr {kind:?}")
             }
         }
     }
@@ -222,7 +222,7 @@ impl HasTokens for Attribute {
         Some(match &mut self.kind {
             AttrKind::Normal(normal) => &mut normal.tokens,
             kind @ AttrKind::DocComment(..) => {
-                panic!("Called tokens_mut on doc comment attr {:?}", kind)
+                panic!("Called tokens_mut on doc comment attr {kind:?}")
             }
         })
     }

View file

@@ -310,7 +310,7 @@ impl Attribute {
             AttrKind::Normal(normal) => normal
                 .tokens
                 .as_ref()
-                .unwrap_or_else(|| panic!("attribute is missing tokens: {:?}", self))
+                .unwrap_or_else(|| panic!("attribute is missing tokens: {self:?}"))
                 .to_attr_token_stream()
                 .to_tokenstream(),
             &AttrKind::DocComment(comment_kind, data) => TokenStream::new(vec![TokenTree::Token(

View file

@@ -9,8 +9,8 @@ pub enum AllocatorKind {
 impl AllocatorKind {
     pub fn fn_name(&self, base: Symbol) -> String {
         match *self {
-            AllocatorKind::Global => format!("__rg_{}", base),
-            AllocatorKind::Default => format!("__rdl_{}", base),
+            AllocatorKind::Global => format!("__rg_{base}"),
+            AllocatorKind::Default => format!("__rdl_{base}"),
         }
     }
 }

View file

@@ -125,27 +125,27 @@ impl fmt::Display for Lit {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let Lit { kind, symbol, suffix } = *self;
         match kind {
-            Byte => write!(f, "b'{}'", symbol)?,
-            Char => write!(f, "'{}'", symbol)?,
-            Str => write!(f, "\"{}\"", symbol)?,
+            Byte => write!(f, "b'{symbol}'")?,
+            Char => write!(f, "'{symbol}'")?,
+            Str => write!(f, "\"{symbol}\"")?,
             StrRaw(n) => write!(
                 f,
                 "r{delim}\"{string}\"{delim}",
                 delim = "#".repeat(n as usize),
                 string = symbol
             )?,
-            ByteStr => write!(f, "b\"{}\"", symbol)?,
+            ByteStr => write!(f, "b\"{symbol}\"")?,
             ByteStrRaw(n) => write!(
                 f,
                 "br{delim}\"{string}\"{delim}",
                 delim = "#".repeat(n as usize),
                 string = symbol
             )?,
-            Integer | Float | Bool | Err => write!(f, "{}", symbol)?,
+            Integer | Float | Bool | Err => write!(f, "{symbol}")?,
         }
         if let Some(suffix) = suffix {
-            write!(f, "{}", suffix)?;
+            write!(f, "{suffix}")?;
         }
         Ok(())
@@ -756,7 +756,7 @@ impl Token {
                 _ => return None,
             },
             SingleQuote => match joint.kind {
-                Ident(name, false) => Lifetime(Symbol::intern(&format!("'{}", name))),
+                Ident(name, false) => Lifetime(Symbol::intern(&format!("'{name}"))),
                 _ => return None,
             },

View file

@@ -258,8 +258,7 @@ impl AttrTokenStream {
                 assert!(
                     found,
-                    "Failed to find trailing delimited group in: {:?}",
-                    target_tokens
+                    "Failed to find trailing delimited group in: {target_tokens:?}"
                 );
             }
             let mut flat: SmallVec<[_; 1]> = SmallVec::new();

View file

@@ -168,7 +168,7 @@ impl fmt::Display for LitKind {
         match *self {
             LitKind::Byte(b) => {
                 let b: String = ascii::escape_default(b).map(Into::<char>::into).collect();
-                write!(f, "b'{}'", b)?;
+                write!(f, "b'{b}'")?;
             }
             LitKind::Char(ch) => write!(f, "'{}'", escape_char_symbol(ch))?,
             LitKind::Str(sym, StrStyle::Cooked) => write!(f, "\"{}\"", escape_string_symbol(sym))?,
@@ -192,7 +192,7 @@ impl fmt::Display for LitKind {
                 )?;
             }
             LitKind::Int(n, ty) => {
-                write!(f, "{}", n)?;
+                write!(f, "{n}")?;
                 match ty {
                     ast::LitIntType::Unsigned(ty) => write!(f, "{}", ty.name())?,
                     ast::LitIntType::Signed(ty) => write!(f, "{}", ty.name())?,
@@ -200,7 +200,7 @@ impl fmt::Display for LitKind {
                 }
             }
             LitKind::Float(symbol, ty) => {
-                write!(f, "{}", symbol)?;
+                write!(f, "{symbol}")?;
                 match ty {
                     ast::LitFloatType::Suffixed(ty) => write!(f, "{}", ty.name())?,
                     ast::LitFloatType::Unsuffixed => {}

View file

@@ -104,7 +104,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
             Err(supported_abis) => {
                 let mut abis = format!("`{}`", supported_abis[0]);
                 for m in &supported_abis[1..] {
-                    let _ = write!(abis, ", `{}`", m);
+                    let _ = write!(abis, ", `{m}`");
                 }
                 self.tcx.sess.emit_err(InvalidAbiClobberAbi {
                     abi_span: *abi_span,
@@ -262,7 +262,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
                 let sub = if !valid_modifiers.is_empty() {
                     let mut mods = format!("`{}`", valid_modifiers[0]);
                     for m in &valid_modifiers[1..] {
-                        let _ = write!(mods, ", `{}`", m);
+                        let _ = write!(mods, ", `{m}`");
                     }
                     InvalidAsmTemplateModifierRegClassSub::SupportModifier {
                         class_name: class.name(),

View file

@@ -1051,7 +1051,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
             }
             _ => {
                 // Replace the ident for bindings that aren't simple.
-                let name = format!("__arg{}", index);
+                let name = format!("__arg{index}");
                 let ident = Ident::from_str(&name);
                 (ident, false)

View file

@@ -296,7 +296,7 @@ impl std::fmt::Display for ImplTraitPosition {
             ImplTraitPosition::ImplReturn => "`impl` method return",
         };
-        write!(f, "{}", name)
+        write!(f, "{name}")
     }
 }
@@ -503,7 +503,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
     fn orig_local_def_id(&self, node: NodeId) -> LocalDefId {
         self.orig_opt_local_def_id(node)
-            .unwrap_or_else(|| panic!("no entry for node id: `{:?}`", node))
+            .unwrap_or_else(|| panic!("no entry for node id: `{node:?}`"))
     }
     /// Given the id of some node in the AST, finds the `LocalDefId` associated with it by the name
@@ -524,7 +524,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
     }
     fn local_def_id(&self, node: NodeId) -> LocalDefId {
-        self.opt_local_def_id(node).unwrap_or_else(|| panic!("no entry for node id: `{:?}`", node))
+        self.opt_local_def_id(node).unwrap_or_else(|| panic!("no entry for node id: `{node:?}`"))
     }
     /// Get the previously recorded `to` local def id given the `from` local def id, obtained using
@@ -2197,7 +2197,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
     fn lower_trait_ref(&mut self, p: &TraitRef, itctx: &ImplTraitContext) -> hir::TraitRef<'hir> {
         let path = match self.lower_qpath(p.ref_id, &None, &p.path, ParamMode::Explicit, itctx) {
             hir::QPath::Resolved(None, path) => path,
-            qpath => panic!("lower_trait_ref: unexpected QPath `{:?}`", qpath),
+            qpath => panic!("lower_trait_ref: unexpected QPath `{qpath:?}`"),
         };
         hir::TraitRef { path, hir_ref_id: self.lower_node_id(p.ref_id) }
     }

View file

@@ -191,23 +191,23 @@ fn doc_comment_to_string(
     data: Symbol,
 ) -> String {
     match (comment_kind, attr_style) {
-        (CommentKind::Line, ast::AttrStyle::Outer) => format!("///{}", data),
-        (CommentKind::Line, ast::AttrStyle::Inner) => format!("//!{}", data),
-        (CommentKind::Block, ast::AttrStyle::Outer) => format!("/**{}*/", data),
-        (CommentKind::Block, ast::AttrStyle::Inner) => format!("/*!{}*/", data),
+        (CommentKind::Line, ast::AttrStyle::Outer) => format!("///{data}"),
+        (CommentKind::Line, ast::AttrStyle::Inner) => format!("//!{data}"),
+        (CommentKind::Block, ast::AttrStyle::Outer) => format!("/**{data}*/"),
+        (CommentKind::Block, ast::AttrStyle::Inner) => format!("/*!{data}*/"),
     }
 }
 pub fn literal_to_string(lit: token::Lit) -> String {
     let token::Lit { kind, symbol, suffix } = lit;
     let mut out = match kind {
-        token::Byte => format!("b'{}'", symbol),
-        token::Char => format!("'{}'", symbol),
-        token::Str => format!("\"{}\"", symbol),
+        token::Byte => format!("b'{symbol}'"),
+        token::Char => format!("'{symbol}'"),
+        token::Str => format!("\"{symbol}\""),
         token::StrRaw(n) => {
             format!("r{delim}\"{string}\"{delim}", delim = "#".repeat(n as usize), string = symbol)
         }
-        token::ByteStr => format!("b\"{}\"", symbol),
+        token::ByteStr => format!("b\"{symbol}\""),
         token::ByteStrRaw(n) => {
             format!("br{delim}\"{string}\"{delim}", delim = "#".repeat(n as usize), string = symbol)
         }

View file

@@ -411,9 +411,9 @@ impl<'a> State<'a> {
             ast::VisibilityKind::Restricted { path, shorthand, .. } => {
                 let path = Self::to_string(|s| s.print_path(path, false, 0));
                 if *shorthand && (path == "crate" || path == "self" || path == "super") {
-                    self.word_nbsp(format!("pub({})", path))
+                    self.word_nbsp(format!("pub({path})"))
                 } else {
-                    self.word_nbsp(format!("pub(in {})", path))
+                    self.word_nbsp(format!("pub(in {path})"))
                 }
             }
             ast::VisibilityKind::Inherited => {}

View file

@@ -619,7 +619,7 @@ fn try_gate_cfg(name: Symbol, span: Span, sess: &ParseSess, features: Option<&Fe
 fn gate_cfg(gated_cfg: &GatedCfg, cfg_span: Span, sess: &ParseSess, features: &Features) {
     let (cfg, feature, has_feature) = gated_cfg;
     if !has_feature(features) && !cfg_span.allows_unstable(*feature) {
-        let explain = format!("`cfg({})` is experimental and subject to change", cfg);
+        let explain = format!("`cfg({cfg})` is experimental and subject to change");
         feature_err(sess, *feature, cfg_span, &explain).emit();
     }
 }
@@ -975,7 +975,7 @@ pub fn find_repr_attrs(sess: &Session, attr: &Attribute) -> Vec<ReprAttr> {
 }
 pub fn parse_repr_attr(sess: &Session, attr: &Attribute) -> Vec<ReprAttr> {
-    assert!(attr.has_name(sym::repr), "expected `#[repr(..)]`, found: {:?}", attr);
+    assert!(attr.has_name(sym::repr), "expected `#[repr(..)]`, found: {attr:?}");
     use ReprAttr::*;
     let mut acc = Vec::new();
     let diagnostic = &sess.parse_sess.span_diagnostic;

View file

@@ -51,7 +51,7 @@ pub(crate) struct UnknownMetaItem<'a> {
 // Manual implementation to be able to format `expected` items correctly.
 impl<'a> IntoDiagnostic<'a> for UnknownMetaItem<'_> {
     fn into_diagnostic(self, handler: &'a Handler) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
-        let expected = self.expected.iter().map(|name| format!("`{}`", name)).collect::<Vec<_>>();
+        let expected = self.expected.iter().map(|name| format!("`{name}`")).collect::<Vec<_>>();
         let mut diag = handler.struct_span_err_with_code(
             self.span,
             fluent::attr_unknown_meta_item,

View file

@@ -277,12 +277,12 @@ impl<Node: Idx> Dominators<Node> {
     }
     pub fn immediate_dominator(&self, node: Node) -> Node {
-        assert!(self.is_reachable(node), "node {:?} is not reachable", node);
+        assert!(self.is_reachable(node), "node {node:?} is not reachable");
         self.immediate_dominators[node].unwrap()
     }
     pub fn dominators(&self, node: Node) -> Iter<'_, Node> {
-        assert!(self.is_reachable(node), "node {:?} is not reachable", node);
+        assert!(self.is_reachable(node), "node {node:?} is not reachable");
         Iter { dominators: self, node: Some(node) }
     }

View file

@@ -233,10 +233,9 @@ where
             .map(G::Node::new)
             .map(|node| match this.start_walk_from(node) {
                 WalkReturn::Complete { scc_index } => scc_index,
-                WalkReturn::Cycle { min_depth } => panic!(
-                    "`start_walk_node({:?})` returned cycle with depth {:?}",
-                    node, min_depth
-                ),
+                WalkReturn::Cycle { min_depth } => {
+                    panic!("`start_walk_node({node:?})` returned cycle with depth {min_depth:?}")
+                }
             })
             .collect();
@@ -272,8 +271,7 @@ where
             NodeState::NotVisited => return None,
             NodeState::InCycleWith { parent } => panic!(
-                "`find_state` returned `InCycleWith({:?})`, which ought to be impossible",
-                parent
+                "`find_state` returned `InCycleWith({parent:?})`, which ought to be impossible"
             ),
         })
     }
@@ -369,7 +367,7 @@ where
                 previous_node = previous;
             }
             // Only InCycleWith nodes were added to the reverse linked list.
-            other => panic!("Invalid previous link while compressing cycle: {:?}", other),
+            other => panic!("Invalid previous link while compressing cycle: {other:?}"),
         }
         debug!("find_state: parent_state = {:?}", node_state);
@@ -394,7 +392,7 @@ where
             // NotVisited can not be part of a cycle since it should
             // have instead gotten explored.
             NodeState::NotVisited | NodeState::InCycleWith { .. } => {
-                panic!("invalid parent state: {:?}", node_state)
+                panic!("invalid parent state: {node_state:?}")
             }
         }
     }

View file

@@ -30,7 +30,7 @@ impl<O: ForestObligation> ObligationForest<O> {
         let counter = COUNTER.fetch_add(1, Ordering::AcqRel);
-        let file_path = dir.as_ref().join(format!("{:010}_{}.gv", counter, description));
+        let file_path = dir.as_ref().join(format!("{counter:010}_{description}.gv"));
         let mut gv_file = BufWriter::new(File::create(file_path).unwrap());
@@ -47,7 +47,7 @@ impl<'a, O: ForestObligation + 'a> dot::Labeller<'a> for &'a ObligationForest<O>
     }
     fn node_id(&self, index: &Self::Node) -> dot::Id<'_> {
-        dot::Id::new(format!("obligation_{}", index)).unwrap()
+        dot::Id::new(format!("obligation_{index}")).unwrap()
     }
     fn node_label(&self, index: &Self::Node) -> dot::LabelText<'_> {

View file

@@ -545,7 +545,7 @@ impl SelfProfiler {
         // length can behave as a source of entropy for heap addresses, when
         // ASLR is disabled and the heap is otherwise determinic.
         let pid: u32 = process::id();
-        let filename = format!("{}-{:07}.rustc_profile", crate_name, pid);
+        let filename = format!("{crate_name}-{pid:07}.rustc_profile");
         let path = output_directory.join(&filename);
         let profiler =
             Profiler::with_counter(&path, measureme::counters::Counter::by_name(counter_name)?)?;

View file

@@ -30,7 +30,7 @@ impl SmallCStr {
             SmallVec::from_vec(data)
         };
         if let Err(e) = ffi::CStr::from_bytes_with_nul(&data) {
-            panic!("The string \"{}\" cannot be converted into a CStr: {}", s, e);
+            panic!("The string \"{s}\" cannot be converted into a CStr: {e}");
         }
         SmallCStr { data }
     }
@@ -39,7 +39,7 @@ impl SmallCStr {
     pub fn new_with_nul(s: &str) -> SmallCStr {
         let b = s.as_bytes();
         if let Err(e) = ffi::CStr::from_bytes_with_nul(b) {
-            panic!("The string \"{}\" cannot be converted into a CStr: {}", s, e);
+            panic!("The string \"{s}\" cannot be converted into a CStr: {e}");
         }
         SmallCStr { data: SmallVec::from_slice(s.as_bytes()) }
     }
@@ -74,7 +74,7 @@ impl<'a> FromIterator<&'a str> for SmallCStr {
             iter.into_iter().flat_map(|s| s.as_bytes()).copied().collect::<SmallVec<_>>();
         data.push(0);
         if let Err(e) = ffi::CStr::from_bytes_with_nul(&data) {
-            panic!("The iterator {:?} cannot be converted into a CStr: {}", data, e);
+            panic!("The iterator {data:?} cannot be converted into a CStr: {e}");
         }
         Self { data }
     }

View file

@@ -71,8 +71,7 @@ where
         // This should return just one element, otherwise it's a bug
         assert!(
             filter.next().is_none(),
-            "Collection {:#?} should have just one matching element",
-            self
+            "Collection {self:#?} should have just one matching element"
         );
         Some(value)
     }

View file

@@ -25,7 +25,7 @@ pub fn arg_expand_all(at_args: &[String]) -> Vec<String> {
             Ok(arg) => args.extend(arg),
             Err(err) => rustc_session::early_error(
                 rustc_session::config::ErrorOutputType::default(),
-                &format!("Failed to load argument file: {}", err),
+                &format!("Failed to load argument file: {err}"),
             ),
         }
     }
@@ -42,8 +42,8 @@ impl fmt::Display for Error {
     fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             Error::Utf8Error(None) => write!(fmt, "Utf8 error"),
-            Error::Utf8Error(Some(path)) => write!(fmt, "Utf8 error in {}", path),
-            Error::IOError(path, err) => write!(fmt, "IO Error: {}: {}", path, err),
+            Error::Utf8Error(Some(path)) => write!(fmt, "Utf8 error in {path}"),
+            Error::IOError(path, err) => write!(fmt, "IO Error: {path}: {err}"),
         }
     }
 }

View file

@@ -486,11 +486,8 @@ impl Compilation {
 fn handle_explain(registry: Registry, code: &str, output: ErrorOutputType) {
     let upper_cased_code = code.to_ascii_uppercase();
-    let normalised = if upper_cased_code.starts_with('E') {
-        upper_cased_code
-    } else {
-        format!("E{0:0>4}", code)
-    };
+    let normalised =
+        if upper_cased_code.starts_with('E') { upper_cased_code } else { format!("E{code:0>4}") };
     match registry.try_find_description(&normalised) {
         Ok(Some(description)) => {
             let mut is_in_code_block = false;
@@ -513,14 +510,14 @@ fn handle_explain(registry: Registry, code: &str, output: ErrorOutputType) {
             if io::stdout().is_terminal() {
                 show_content_with_pager(&text);
             } else {
-                print!("{}", text);
+                print!("{text}");
             }
         }
         Ok(None) => {
-            early_error(output, &format!("no extended information for {}", code));
+            early_error(output, &format!("no extended information for {code}"));
         }
         Err(InvalidErrorCode) => {
-            early_error(output, &format!("{} is not a valid error code", code));
+            early_error(output, &format!("{code} is not a valid error code"));
         }
     }
 }
@@ -552,7 +549,7 @@ fn show_content_with_pager(content: &str) {
     // If pager fails for whatever reason, we should still print the content
     // to standard output
     if fallback_to_println {
-        print!("{}", content);
+        print!("{content}");
    }
 }
@@ -672,7 +669,7 @@ fn print_crate_info(
                 );
                 let id = rustc_session::output::find_crate_name(sess, attrs, input);
                 if *req == PrintRequest::CrateName {
-                    println!("{}", id);
+                    println!("{id}");
                     continue;
                 }
                 let crate_types = collect_crate_types(sess, attrs);
@@ -704,7 +701,7 @@ fn print_crate_info(
                         }
                         if let Some(value) = value {
-                            Some(format!("{}=\"{}\"", name, value))
+                            Some(format!("{name}=\"{value}\""))
                         } else {
                             Some(name.to_string())
                         }
@@ -713,7 +710,7 @@
                 cfgs.sort();
                 for cfg in cfgs {
-                    println!("{}", cfg);
+                    println!("{cfg}");
                 }
             }
             CallingConventions => {
@@ -739,7 +736,7 @@ fn print_crate_info(
                     let stable = sess.target.options.supported_split_debuginfo.contains(split);
                     let unstable_ok = sess.unstable_options();
                     if stable || unstable_ok {
-                        println!("{}", split);
+                        println!("{split}");
                    }
                }
            }
@@ -776,14 +773,14 @@ pub fn version_at_macro_invocation(
 ) {
     let verbose = matches.opt_present("verbose");
-    println!("{} {}", binary, version);
+    println!("{binary} {version}");
     if verbose {
-        println!("binary: {}", binary);
-        println!("commit-hash: {}", commit_hash);
-        println!("commit-date: {}", commit_date);
+        println!("binary: {binary}");
+        println!("commit-hash: {commit_hash}");
+        println!("commit-date: {commit_date}");
         println!("host: {}", config::host_triple());
-        println!("release: {}", release);
+        println!("release: {release}");
         let debug_flags = matches.opt_strs("Z");
         let backend_name = debug_flags.iter().find_map(|x| x.strip_prefix("codegen-backend="));
@@ -1037,7 +1034,7 @@ pub fn handle_options(args: &[String]) -> Option<getopts::Matches> {
                 .map(|&(name, ..)| ('C', name))
                 .chain(Z_OPTIONS.iter().map(|&(name, ..)| ('Z', name)))
                 .find(|&(_, name)| *opt == name.replace('_', "-"))
-                .map(|(flag, _)| format!("{}. Did you mean `-{} {}`?", e, flag, opt)),
+                .map(|(flag, _)| format!("{e}. Did you mean `-{flag} {opt}`?")),
             _ => None,
         };
         early_error(ErrorOutputType::default(), &msg.unwrap_or_else(|| e.to_string()));
@@ -1148,7 +1145,7 @@ fn extra_compiler_flags() -> Option<(Vec<String>, bool)> {
         } else {
             result.push(a.to_string());
             match ICE_REPORT_COMPILER_FLAGS_STRIP_VALUE.iter().find(|s| option == **s) {
-                Some(s) => result.push(format!("{}=[REDACTED]", s)),
+                Some(s) => result.push(format!("{s}=[REDACTED]")),
                 None => result.push(content),
            }
        }
@@ -1246,7 +1243,7 @@ pub fn report_ice(info: &panic::PanicInfo<'_>, bug_report_url: &str) {
     let mut xs: Vec<Cow<'static, str>> = vec![
         "the compiler unexpectedly panicked. this is a bug.".into(),
-        format!("we would appreciate a bug report: {}", bug_report_url).into(),
+        format!("we would appreciate a bug report: {bug_report_url}").into(),
         format!(
             "rustc {} running on {}",
             util::version_str!().unwrap_or("unknown_version"),
@@ -1379,7 +1376,7 @@ pub fn main() -> ! {
             arg.into_string().unwrap_or_else(|arg| {
                 early_error(
                     ErrorOutputType::default(),
-                    &format!("argument {} is not valid Unicode: {:?}", i, arg),
+                    &format!("argument {i} is not valid Unicode: {arg:?}"),
                 )
            })
        })

View file

@@ -360,7 +360,7 @@ fn get_source(input: &Input, sess: &Session) -> (String, FileName) {
 fn write_or_print(out: &str, ofile: Option<&Path>, sess: &Session) {
     match ofile {
-        None => print!("{}", out),
+        None => print!("{out}"),
         Some(p) => {
             if let Err(e) = std::fs::write(p, out) {
                 sess.emit_fatal(UnprettyDumpFail {
@@ -402,7 +402,7 @@ pub fn print_after_parsing(
         }
         AstTree(PpAstTreeMode::Normal) => {
             debug!("pretty printing AST tree");
-            format!("{:#?}", krate)
+            format!("{krate:#?}")
         }
         _ => unreachable!(),
     };
@@ -446,7 +446,7 @@ pub fn print_after_hir_lowering<'tcx>(
         AstTree(PpAstTreeMode::Expanded) => {
             debug!("pretty-printing expanded AST");
-            format!("{:#?}", krate)
+            format!("{krate:#?}")
         }
         Hir(s) => call_with_pp_support_hir(&s, tcx, move |annotation, hir_map| {

View file

@@ -70,7 +70,7 @@ impl std::fmt::Debug for AttributeGate {
     fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match *self {
             Self::Gated(ref stab, name, expl, _) => {
-                write!(fmt, "Gated({:?}, {}, {})", stab, name, expl)
+                write!(fmt, "Gated({stab:?}, {name}, {expl})")
             }
             Self::Ungated => write!(fmt, "Ungated"),
         }

View file

@@ -120,7 +120,7 @@ fn find_lang_feature_issue(feature: Symbol) -> Option<NonZeroU32> {
             .find(|t| t.name == feature);
         match found {
             Some(found) => found.issue,
-            None => panic!("feature `{}` is not declared anywhere", feature),
+            None => panic!("feature `{feature}` is not declared anywhere"),
         }
     }
 }

View file

@@ -516,7 +516,7 @@ impl<'a> LabelText<'a> {
         match *self {
             LabelStr(ref s) => format!("\"{}\"", s.escape_default()),
             EscStr(ref s) => format!("\"{}\"", LabelText::escape_str(s)),
-            HtmlStr(ref s) => format!("<{}>", s),
+            HtmlStr(ref s) => format!("<{s}>"),
         }
     }
@@ -622,7 +622,7 @@ where
     if let Some(fontname) = options.iter().find_map(|option| {
         if let RenderOption::Fontname(fontname) = option { Some(fontname) } else { None }
     }) {
-        font = format!(r#"fontname="{}""#, fontname);
+        font = format!(r#"fontname="{fontname}""#);
         graph_attrs.push(&font[..]);
         content_attrs.push(&font[..]);
     }
@@ -635,8 +635,8 @@ where
     if !(graph_attrs.is_empty() && content_attrs.is_empty()) {
         writeln!(w, r#" graph[{}];"#, graph_attrs.join(" "))?;
         let content_attrs_str = content_attrs.join(" ");
-        writeln!(w, r#" node[{}];"#, content_attrs_str)?;
-        writeln!(w, r#" edge[{}];"#, content_attrs_str)?;
+        writeln!(w, r#" node[{content_attrs_str}];"#)?;
+        writeln!(w, r#" edge[{content_attrs_str}];"#)?;
     }
     let mut text = Vec::new();
@@ -649,7 +649,7 @@ where
         write!(text, "{}", id.as_slice()).unwrap();
         if !options.contains(&RenderOption::NoNodeLabels) {
-            write!(text, "[label={}]", escaped).unwrap();
+            write!(text, "[label={escaped}]").unwrap();
         }
         let style = g.node_style(n);
@@ -678,7 +678,7 @@ where
         write!(text, "{} -> {}", source_id.as_slice(), target_id.as_slice()).unwrap();
         if !options.contains(&RenderOption::NoEdgeLabels) {
-            write!(text, "[label={}]", escaped_label).unwrap();
+            write!(text, "[label={escaped_label}]").unwrap();
         }
         let style = g.edge_style(e);

View file

@@ -597,8 +597,7 @@ impl<Id> Res<Id> {
     where
         Id: Debug,
     {
-        self.opt_def_id()
-            .unwrap_or_else(|| panic!("attempted .def_id() on invalid res: {:?}", self))
+        self.opt_def_id().unwrap_or_else(|| panic!("attempted .def_id() on invalid res: {self:?}"))
     }
     /// Return `Some(..)` with the `DefId` of this `Res` if it has a ID, else `None`.

View file

@@ -53,9 +53,8 @@ impl DefPathTable {
                 //
                 // See the documentation for DefPathHash for more information.
                 panic!(
-                    "found DefPathHash collision between {:?} and {:?}. \
-                    Compilation cannot continue.",
-                    def_path1, def_path2
+                    "found DefPathHash collision between {def_path1:?} and {def_path2:?}. \
+                    Compilation cannot continue."
                 );
             }
@@ -224,7 +223,7 @@ impl DefPath {
         let mut s = String::with_capacity(self.data.len() * 16);
         for component in &self.data {
-            write!(s, "::{}", component).unwrap();
+            write!(s, "::{component}").unwrap();
         }
         s
@@ -240,7 +239,7 @@ impl DefPath {
         for component in &self.data {
             s.extend(opt_delimiter);
             opt_delimiter = Some('-');
-            write!(s, "{}", component).unwrap();
+            write!(s, "{component}").unwrap();
         }
         s
@@ -433,7 +432,7 @@ impl fmt::Display for DefPathData {
         match self.name() {
             DefPathDataName::Named(name) => f.write_str(name.as_str()),
             // FIXME(#70334): this will generate legacy {{closure}}, {{impl}}, etc
-            DefPathDataName::Anon { namespace } => write!(f, "{{{{{}}}}}", namespace),
+            DefPathDataName::Anon { namespace } => write!(f, "{{{{{namespace}}}}}"),
         }
     }
 }

View file

@@ -119,7 +119,7 @@ impl HirId {
 impl fmt::Display for HirId {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{:?}", self)
+        write!(f, "{self:?}")
     }
 }

View file

@@ -1268,7 +1268,7 @@ impl<'a> State<'a> {
             hir::InlineAsmOperand::In { reg, ref expr } => {
                 s.word("in");
                 s.popen();
-                s.word(format!("{}", reg));
+                s.word(format!("{reg}"));
                 s.pclose();
                 s.space();
                 s.print_expr(expr);
@@ -1276,7 +1276,7 @@
             hir::InlineAsmOperand::Out { reg, late, ref expr } => {
                 s.word(if late { "lateout" } else { "out" });
                 s.popen();
-                s.word(format!("{}", reg));
+                s.word(format!("{reg}"));
                 s.pclose();
                 s.space();
                 match expr {
@@ -1287,7 +1287,7 @@
             hir::InlineAsmOperand::InOut { reg, late, ref expr } => {
                 s.word(if late { "inlateout" } else { "inout" });
                 s.popen();
-                s.word(format!("{}", reg));
+                s.word(format!("{reg}"));
                 s.pclose();
                 s.space();
                 s.print_expr(expr);
@@ -1295,7 +1295,7 @@
             hir::InlineAsmOperand::SplitInOut { reg, late, ref in_expr, ref out_expr } => {
                 s.word(if late { "inlateout" } else { "inout" });
                 s.popen();
-                s.word(format!("{}", reg));
+                s.word(format!("{reg}"));
                 s.pclose();
                 s.space();
                 s.print_expr(in_expr);

View file

@@ -1091,7 +1091,7 @@ impl<T: Idx> ToString for BitSet<T> {
                 assert!(mask <= 0xFF);
                 let byte = word & mask;
-                result.push_str(&format!("{}{:02x}", sep, byte));
+                result.push_str(&format!("{sep}{byte:02x}"));
                 if remain <= 8 {
                     break;

View file

@@ -135,10 +135,7 @@ impl<I: Idx> IntervalSet<I> {
         };
         debug_assert!(
             self.check_invariants(),
-            "wrong intervals after insert {:?}..={:?} to {:?}",
-            start,
-            end,
-            self
+            "wrong intervals after insert {start:?}..={end:?} to {self:?}"
         );
         result
     }

View file

@@ -90,8 +90,7 @@ pub fn parse_cfgspecs(cfgspecs: Vec<String>) -> FxHashSet<(String, Option<String
         .into_iter()
         .map(|s| {
             let sess = ParseSess::with_silent_emitter(Some(format!(
-                "this error occurred on the command line: `--cfg={}`",
-                s
+                "this error occurred on the command line: `--cfg={s}`"
             )));
             let filename = FileName::cfg_spec_source_code(&s);
@@ -150,8 +149,7 @@ pub fn parse_check_cfg(specs: Vec<String>) -> CheckCfg {
     'specs: for s in specs {
         let sess = ParseSess::with_silent_emitter(Some(format!(
-            "this error occurred on the command line: `--check-cfg={}`",
-            s
+            "this error occurred on the command line: `--check-cfg={s}`"
         )));
         let filename = FileName::cfg_spec_source_code(&s);

View file

@@ -620,7 +620,7 @@ fn write_out_deps(
         // prevents `make` from spitting out an error if a file is later
         // deleted. For more info see #28735
         for path in files {
-            writeln!(file, "{}:", path)?;
+            writeln!(file, "{path}:")?;
         }
         // Emit special comments with information about accessed environment variables.
@@ -633,9 +633,9 @@ fn write_out_deps(
             envs.sort_unstable();
             writeln!(file)?;
             for (k, v) in envs {
-                write!(file, "# env-dep:{}", k)?;
+                write!(file, "# env-dep:{k}")?;
                 if let Some(v) = v {
-                    write!(file, "={}", v)?;
+                    write!(file, "={v}")?;
                 }
                 writeln!(file)?;
             }

View file

@@ -205,13 +205,13 @@ pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce() -> R + Send, R: Send>(
 fn load_backend_from_dylib(path: &Path) -> MakeBackendFn {
     let lib = unsafe { Library::new(path) }.unwrap_or_else(|err| {
-        let err = format!("couldn't load codegen backend {:?}: {}", path, err);
+        let err = format!("couldn't load codegen backend {path:?}: {err}");
         early_error(ErrorOutputType::default(), &err);
     });
     let backend_sym = unsafe { lib.get::<MakeBackendFn>(b"__rustc_codegen_backend") }
         .unwrap_or_else(|e| {
-            let err = format!("couldn't load codegen backend: {}", e);
+            let err = format!("couldn't load codegen backend: {e}");
             early_error(ErrorOutputType::default(), &err);
         });
@@ -304,8 +304,7 @@ fn get_codegen_sysroot(maybe_sysroot: &Option<PathBuf>, backend_name: &str) -> M
             .join("\n* ");
         let err = format!(
             "failed to find a `codegen-backends` folder \
-             in the sysroot candidates:\n* {}",
-            candidates
+             in the sysroot candidates:\n* {candidates}"
         );
         early_error(ErrorOutputType::default(), &err);
     });
@@ -325,7 +324,7 @@ fn get_codegen_sysroot(maybe_sysroot: &Option<PathBuf>, backend_name: &str) -> M
     let expected_names = &[
         format!("rustc_codegen_{}-{}", backend_name, env!("CFG_RELEASE")),
-        format!("rustc_codegen_{}", backend_name),
+        format!("rustc_codegen_{backend_name}"),
     ];
     for entry in d.filter_map(|e| e.ok()) {
         let path = entry.path();
@@ -354,7 +353,7 @@ fn get_codegen_sysroot(maybe_sysroot: &Option<PathBuf>, backend_name: &str) -> M
     match file {
         Some(ref s) => load_backend_from_dylib(s),
         None => {
-            let err = format!("unsupported builtin codegen backend `{}`", backend_name);
+            let err = format!("unsupported builtin codegen backend `{backend_name}`");
             early_error(ErrorOutputType::default(), &err);
         }
     }
@@ -389,7 +388,7 @@ pub(crate) fn check_attr_crate_type(
                 BuiltinLintDiagnostics::UnknownCrateTypes(
                     span,
                     "did you mean".to_string(),
-                    format!("\"{}\"", candidate),
+                    format!("\"{candidate}\""),
                 ),
             );
         } else {

View file

@@ -58,7 +58,7 @@ fn restore_library_path() {
 /// Supposed to be used for all variables except those set for build scripts by cargo
 /// <https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts>
 fn tracked_env_var_os<K: AsRef<OsStr> + Display>(key: K) -> Option<OsString> {
-    println!("cargo:rerun-if-env-changed={}", key);
+    println!("cargo:rerun-if-env-changed={key}");
     env::var_os(key)
 }
@@ -84,7 +84,7 @@ fn output(cmd: &mut Command) -> String {
     let output = match cmd.stderr(Stdio::inherit()).output() {
         Ok(status) => status,
         Err(e) => {
-            println!("\n\nfailed to execute command: {:?}\nerror: {}\n\n", cmd, e);
+            println!("\n\nfailed to execute command: {cmd:?}\nerror: {e}\n\n");
             std::process::exit(1);
         }
     };
@@ -100,7 +100,7 @@ fn output(cmd: &mut Command) -> String {
 fn main() {
     for component in REQUIRED_COMPONENTS.iter().chain(OPTIONAL_COMPONENTS.iter()) {
-        println!("cargo:rustc-check-cfg=values(llvm_component,\"{}\")", component);
+        println!("cargo:rustc-check-cfg=values(llvm_component,\"{component}\")");
     }
     if tracked_env_var_os("RUST_CHECK").is_some() {
@@ -164,12 +164,12 @@ fn main() {
     for component in REQUIRED_COMPONENTS {
         if !components.contains(component) {
-            panic!("require llvm component {} but wasn't found", component);
+            panic!("require llvm component {component} but wasn't found");
         }
     }
     for component in components.iter() {
-        println!("cargo:rustc-cfg=llvm_component=\"{}\"", component);
+        println!("cargo:rustc-cfg=llvm_component=\"{component}\"");
     }
     // Link in our own LLVM shims, compiled with the same flags as LLVM
@@ -283,7 +283,7 @@ fn main() {
         }
         let kind = if name.starts_with("LLVM") { llvm_kind } else { "dylib" };
-        println!("cargo:rustc-link-lib={}={}", kind, name);
+        println!("cargo:rustc-link-lib={kind}={name}");
     }
     // LLVM ldflags
@@ -302,11 +302,11 @@ fn main() {
                 println!("cargo:rustc-link-search=native={}", stripped.replace(&host, &target));
             }
         } else if let Some(stripped) = lib.strip_prefix("-LIBPATH:") {
-            println!("cargo:rustc-link-search=native={}", stripped);
+            println!("cargo:rustc-link-search=native={stripped}");
         } else if let Some(stripped) = lib.strip_prefix("-l") {
-            println!("cargo:rustc-link-lib={}", stripped);
+            println!("cargo:rustc-link-lib={stripped}");
         } else if let Some(stripped) = lib.strip_prefix("-L") {
-            println!("cargo:rustc-link-search=native={}", stripped);
+            println!("cargo:rustc-link-search=native={stripped}");
         }
     }
@@ -318,9 +318,9 @@ fn main() {
     if let Some(s) = llvm_linker_flags {
         for lib in s.into_string().unwrap().split_whitespace() {
            if let Some(stripped) = lib.strip_prefix("-l") {
-                println!("cargo:rustc-link-lib={}", stripped);
+                println!("cargo:rustc-link-lib={stripped}");
            } else if let Some(stripped) = lib.strip_prefix("-L") {
-                println!("cargo:rustc-link-search=native={}", stripped);
+                println!("cargo:rustc-link-search=native={stripped}");
            }
        }
    }
@@ -359,14 +359,14 @@ fn main() {
         let path = PathBuf::from(s);
         println!("cargo:rustc-link-search=native={}", path.parent().unwrap().display());
         if target.contains("windows") {
-            println!("cargo:rustc-link-lib=static:-bundle={}", stdcppname);
+            println!("cargo:rustc-link-lib=static:-bundle={stdcppname}");
         } else {
-            println!("cargo:rustc-link-lib=static={}", stdcppname);
+            println!("cargo:rustc-link-lib=static={stdcppname}");
         }
     } else if cxxflags.contains("stdlib=libc++") {
         println!("cargo:rustc-link-lib=c++");
     } else {
-        println!("cargo:rustc-link-lib={}", stdcppname);
+        println!("cargo:rustc-link-lib={stdcppname}");
     }
 }

View file

@@ -114,8 +114,7 @@ impl Display for Error {
         match self {
             Error::InvalidColorValue(value) => write!(
                 formatter,
-                "invalid log color value '{}': expected one of always, never, or auto",
-                value,
+                "invalid log color value '{value}': expected one of always, never, or auto",
             ),
             Error::NonUnicodeColorValue => write!(
                 formatter,

View file

@@ -76,11 +76,11 @@ pub(crate) fn invalid_attr(attr: &Attribute, meta: &Meta) -> Diagnostic {
     let span = attr.span().unwrap();
     let path = path_to_string(&attr.path);
     match meta {
-        Meta::Path(_) => span_err(span, &format!("`#[{}]` is not a valid attribute", path)),
+        Meta::Path(_) => span_err(span, &format!("`#[{path}]` is not a valid attribute")),
         Meta::NameValue(_) => {
-            span_err(span, &format!("`#[{} = ...]` is not a valid attribute", path))
+            span_err(span, &format!("`#[{path} = ...]` is not a valid attribute"))
         }
-        Meta::List(_) => span_err(span, &format!("`#[{}(...)]` is not a valid attribute", path)),
+        Meta::List(_) => span_err(span, &format!("`#[{path}(...)]` is not a valid attribute")),
     }
 }
@@ -107,7 +107,7 @@ pub(crate) fn invalid_nested_attr(attr: &Attribute, nested: &NestedMeta) -> Diag
     let meta = match nested {
         syn::NestedMeta::Meta(meta) => meta,
         syn::NestedMeta::Lit(_) => {
-            return span_err(span, &format!("`#[{}(\"...\")]` is not a valid attribute", name));
+            return span_err(span, &format!("`#[{name}(\"...\")]` is not a valid attribute"));
         }
     };
@@ -115,13 +115,11 @@ pub(crate) fn invalid_nested_attr(attr: &Attribute, nested: &NestedMeta) -> Diag
     let path = path_to_string(meta.path());
     match meta {
         Meta::NameValue(..) => {
-            span_err(span, &format!("`#[{}({} = ...)]` is not a valid attribute", name, path))
-        }
-        Meta::Path(..) => {
-            span_err(span, &format!("`#[{}({})]` is not a valid attribute", name, path))
-        }
+            span_err(span, &format!("`#[{name}({path} = ...)]` is not a valid attribute"))
+        }
+        Meta::Path(..) => span_err(span, &format!("`#[{name}({path})]` is not a valid attribute")),
         Meta::List(..) => {
-            span_err(span, &format!("`#[{}({}(...))]` is not a valid attribute", name, path))
+            span_err(span, &format!("`#[{name}({path}(...))]` is not a valid attribute"))
         }
     }
 }

View file

@@ -178,7 +178,7 @@ pub(crate) fn fluent_messages(input: proc_macro::TokenStream) -> proc_macro::Tok
                     opt: Default::default(),
                 };
                 let dl = DisplayList::from(snippet);
-                eprintln!("{}\n", dl);
+                eprintln!("{dl}\n");
             }
             continue;
         }
@@ -265,7 +265,7 @@ pub(crate) fn fluent_messages(input: proc_macro::TokenStream) -> proc_macro::Tok
                 Diagnostic::spanned(
                     path_span,
                     Level::Error,
-                    format!("overrides existing {}: `{}`", kind, id),
+                    format!("overrides existing {kind}: `{id}`"),
                 )
                 .span_help(previous_defns[&id], "previously defined in this resource")
                 .emit();

View file

@@ -198,8 +198,7 @@ impl<'parent, 'a> SubdiagnosticDeriveVariantBuilder<'parent, 'a> {
             throw_span_err!(
                 attr.span().unwrap(),
                 &format!(
-                    "diagnostic slug must be first argument of a `#[{}(...)]` attribute",
-                    name
+                    "diagnostic slug must be first argument of a `#[{name}(...)]` attribute"
                 )
             );
         };

View file

@@ -322,7 +322,7 @@ pub(crate) trait HasFieldMap {
             None => {
                 span_err(
                     span.unwrap(),
-                    &format!("`{}` doesn't refer to a field on this type", field),
+                    &format!("`{field}` doesn't refer to a field on this type"),
                 )
                 .emit();
                 quote! {
@@ -603,8 +603,7 @@ impl SubdiagnosticKind {
                 if suggestion_kind != SuggestionKind::Normal {
                     invalid_attr(attr, &meta)
                         .help(format!(
-                            r#"Use `#[suggestion(..., style = "{}")]` instead"#,
-                            suggestion_kind
+                            r#"Use `#[suggestion(..., style = "{suggestion_kind}")]` instead"#
                         ))
                         .emit();
                 }
@@ -621,8 +620,7 @@ impl SubdiagnosticKind {
                 if suggestion_kind != SuggestionKind::Normal {
                     invalid_attr(attr, &meta)
                         .help(format!(
-                            r#"Use `#[multipart_suggestion(..., style = "{}")]` instead"#,
-                            suggestion_kind
+                            r#"Use `#[multipart_suggestion(..., style = "{suggestion_kind}")]` instead"#
                         ))
                         .emit();
                 }

View file

@@ -41,7 +41,7 @@ impl Parse for Newtype {
             };
             if let Some(old) = max.replace(literal.lit) {
-                panic!("Specified multiple max: {:?}", old);
+                panic!("Specified multiple max: {old:?}");
             }
             false
@@ -52,7 +52,7 @@ impl Parse for Newtype {
             };
             if let Some(old) = debug_format.replace(literal.lit) {
-                panic!("Specified multiple debug format options: {:?}", old);
+                panic!("Specified multiple debug format options: {old:?}");
             }
             false

View file

@@ -239,7 +239,7 @@ fn doc_comment_from_desc(list: &Punctuated<Expr, token::Comma>) -> Result<Attrib
             .unwrap();
         },
     );
-    let doc_string = format!("[query description - consider adding a doc-comment!] {}", doc_string);
+    let doc_string = format!("[query description - consider adding a doc-comment!] {doc_string}");
     Ok(parse_quote! { #[doc = #doc_string] })
 }

View file

@@ -134,7 +134,7 @@ fn symbols_with_errors(input: TokenStream) -> (TokenStream, Vec<syn::Error>) {
     let mut check_dup = |span: Span, str: &str, errors: &mut Errors| {
         if let Some(prev_span) = keys.get(str) {
-            errors.error(span, format!("Symbol `{}` is duplicated", str));
+            errors.error(span, format!("Symbol `{str}` is duplicated"));
             errors.error(*prev_span, "location of previous definition".to_string());
         } else {
             keys.insert(str.to_string(), span);
@@ -144,8 +144,8 @@ fn symbols_with_errors(input: TokenStream) -> (TokenStream, Vec<syn::Error>) {
     let mut check_order = |span: Span, str: &str, errors: &mut Errors| {
         if let Some((prev_span, ref prev_str)) = prev_key {
             if str < prev_str {
-                errors.error(span, format!("Symbol `{}` must precede `{}`", str, prev_str));
-                errors.error(prev_span, format!("location of previous symbol `{}`", prev_str));
+                errors.error(span, format!("Symbol `{str}` must precede `{prev_str}`"));
+                errors.error(prev_span, format!("location of previous symbol `{prev_str}`"));
             }
         }
         prev_key = Some((span, str.to_string()));

View file

@@ -112,7 +112,7 @@ impl<'a> std::fmt::Debug for CrateDump<'a> {
         writeln!(fmt, "resolved crates:")?;
         for (cnum, data) in self.0.iter_crate_data() {
             writeln!(fmt, " name: {}", data.name())?;
-            writeln!(fmt, " cnum: {}", cnum)?;
+            writeln!(fmt, " cnum: {cnum}")?;
             writeln!(fmt, " hash: {}", data.hash())?;
             writeln!(fmt, " reqd: {:?}", data.dep_kind())?;
             let CrateSource { dylib, rlib, rmeta } = data.source();
@@ -150,7 +150,7 @@ impl CStore {
     pub(crate) fn get_crate_data(&self, cnum: CrateNum) -> CrateMetadataRef<'_> {
         let cdata = self.metas[cnum]
             .as_ref()
-            .unwrap_or_else(|| panic!("Failed to get crate data for {:?}", cnum));
+            .unwrap_or_else(|| panic!("Failed to get crate data for {cnum:?}"));
         CrateMetadataRef { cdata, cstore: self }
     }

View file

@@ -45,7 +45,7 @@ pub fn find_native_static_library(
     for path in search_paths {
         for (prefix, suffix) in &formats {
-            let test = path.join(format!("{}{}{}", prefix, name, suffix));
+            let test = path.join(format!("{prefix}{name}{suffix}"));
             if test.exists() {
                 return test;
             }

View file

@@ -462,7 +462,7 @@ impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for SyntaxContext {
                 .root
                 .syntax_contexts
                 .get(cdata, id)
-                .unwrap_or_else(|| panic!("Missing SyntaxContext {:?} for crate {:?}", id, cname))
+                .unwrap_or_else(|| panic!("Missing SyntaxContext {id:?} for crate {cname:?}"))
                 .decode((cdata, sess))
         })
     }
@@ -806,7 +806,7 @@ impl<'a, 'tcx> CrateMetadataRef<'a> {
             .tables
             .def_span
             .get(self, index)
-            .unwrap_or_else(|| panic!("Missing span for {:?}", index))
+            .unwrap_or_else(|| panic!("Missing span for {index:?}"))
             .decode((self, sess))
     }
@@ -1249,7 +1249,7 @@ impl<'a, 'tcx> CrateMetadataRef<'a> {
            .tables
            .proc_macro_quoted_spans
            .get(self, index)
-            .unwrap_or_else(|| panic!("Missing proc macro quoted span: {:?}", index))
+            .unwrap_or_else(|| panic!("Missing proc macro quoted span: {index:?}"))
            .decode((self, sess))
    }

View file

@@ -230,7 +230,7 @@ provide! { tcx, def_id, other, cdata,
             .trait_impl_trait_tys
             .get(cdata, def_id.index)
             .map(|lazy| lazy.decode((cdata, tcx)))
-            .process_decoded(tcx, || panic!("{:?} does not have trait_impl_trait_tys", def_id)))
+            .process_decoded(tcx, || panic!("{def_id:?} does not have trait_impl_trait_tys")))
     }
     visibility => { cdata.get_visibility(def_id.index) }

View file

@@ -58,7 +58,7 @@ impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for DefPathHashMapRef<'static>
         let _ = d.read_raw_bytes(len);
         let inner = odht::HashTable::from_raw_bytes(o).unwrap_or_else(|e| {
-            panic!("decode error: {}", e);
+            panic!("decode error: {e}");
         });
         DefPathHashMapRef::OwnedFromMetadata(inner)
     }

View file

@ -145,7 +145,7 @@ impl<'a, 'tcx, I, T> Encodable<EncodeContext<'a, 'tcx>> for LazyTable<I, T> {
impl<'a, 'tcx> Encodable<EncodeContext<'a, 'tcx>> for CrateNum { impl<'a, 'tcx> Encodable<EncodeContext<'a, 'tcx>> for CrateNum {
fn encode(&self, s: &mut EncodeContext<'a, 'tcx>) { fn encode(&self, s: &mut EncodeContext<'a, 'tcx>) {
if *self != LOCAL_CRATE && s.is_proc_macro { if *self != LOCAL_CRATE && s.is_proc_macro {
panic!("Attempted to encode non-local CrateNum {:?} for proc-macro crate", self); panic!("Attempted to encode non-local CrateNum {self:?} for proc-macro crate");
} }
s.emit_u32(self.as_u32()); s.emit_u32(self.as_u32());
} }
@ -276,7 +276,7 @@ impl<'a, 'tcx> Encodable<EncodeContext<'a, 'tcx>> for Span {
// Introduce a new scope so that we drop the 'lock()' temporary // Introduce a new scope so that we drop the 'lock()' temporary
match &*source_file.external_src.lock() { match &*source_file.external_src.lock() {
ExternalSource::Foreign { metadata_index, .. } => *metadata_index, ExternalSource::Foreign { metadata_index, .. } => *metadata_index,
src => panic!("Unexpected external source {:?}", src), src => panic!("Unexpected external source {src:?}"),
} }
}; };
@ -733,12 +733,9 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
let prefix = "meta-stats"; let prefix = "meta-stats";
let perc = |bytes| (bytes * 100) as f64 / total_bytes as f64; let perc = |bytes| (bytes * 100) as f64 / total_bytes as f64;
eprintln!("{} METADATA STATS", prefix); eprintln!("{prefix} METADATA STATS");
eprintln!("{} {:<23}{:>10}", prefix, "Section", "Size"); eprintln!("{} {:<23}{:>10}", prefix, "Section", "Size");
eprintln!( eprintln!("{prefix} ----------------------------------------------------------------");
"{} ----------------------------------------------------------------",
prefix
);
for (label, size) in stats { for (label, size) in stats {
eprintln!( eprintln!(
"{} {:<23}{:>10} ({:4.1}%)", "{} {:<23}{:>10} ({:4.1}%)",
@ -748,10 +745,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
perc(size) perc(size)
); );
} }
eprintln!( eprintln!("{prefix} ----------------------------------------------------------------");
"{} ----------------------------------------------------------------",
prefix
);
eprintln!( eprintln!(
"{} {:<23}{:>10} (of which {:.1}% are zero bytes)", "{} {:<23}{:>10} (of which {:.1}% are zero bytes)",
prefix, prefix,
@ -759,7 +753,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
to_readable_str(total_bytes), to_readable_str(total_bytes),
perc(zero_bytes) perc(zero_bytes)
); );
eprintln!("{}", prefix); eprintln!("{prefix}");
} }
root root

View file

@ -143,7 +143,7 @@ where
", " ", "
}; };
write!(f, "{}", delim)?; write!(f, "{delim}")?;
idx.fmt_with(ctxt, f)?; idx.fmt_with(ctxt, f)?;
first = false; first = false;
} }
@ -164,7 +164,7 @@ where
", " ", "
}; };
write!(f, "{}", delim)?; write!(f, "{delim}")?;
idx.fmt_with(ctxt, f)?; idx.fmt_with(ctxt, f)?;
first = false; first = false;
} }

View file

@ -71,7 +71,7 @@ where
fn graph_id(&self) -> dot::Id<'_> { fn graph_id(&self) -> dot::Id<'_> {
let name = graphviz_safe_def_name(self.body.source.def_id()); let name = graphviz_safe_def_name(self.body.source.def_id());
dot::Id::new(format!("graph_for_def_id_{}", name)).unwrap() dot::Id::new(format!("graph_for_def_id_{name}")).unwrap()
} }
fn node_id(&self, n: &Self::Node) -> dot::Id<'_> { fn node_id(&self, n: &Self::Node) -> dot::Id<'_> {
@ -190,7 +190,7 @@ where
" cellpadding=\"3\"", " cellpadding=\"3\"",
" sides=\"rb\"", " sides=\"rb\"",
); );
write!(w, r#"<table{fmt}>"#, fmt = table_fmt)?; write!(w, r#"<table{table_fmt}>"#)?;
// A + B: Block header // A + B: Block header
match self.style { match self.style {
@ -372,7 +372,7 @@ where
write!(w, concat!("<tr>", r#"<td colspan="2" {fmt}>MIR</td>"#,), fmt = fmt,)?; write!(w, concat!("<tr>", r#"<td colspan="2" {fmt}>MIR</td>"#,), fmt = fmt,)?;
for name in state_column_names { for name in state_column_names {
write!(w, "<td {fmt}>{name}</td>", fmt = fmt, name = name)?; write!(w, "<td {fmt}>{name}</td>")?;
} }
write!(w, "</tr>") write!(w, "</tr>")
@ -394,18 +394,18 @@ where
}; };
for (i, statement) in body[block].statements.iter().enumerate() { for (i, statement) in body[block].statements.iter().enumerate() {
let statement_str = format!("{:?}", statement); let statement_str = format!("{statement:?}");
let index_str = format!("{}", i); let index_str = format!("{i}");
let after = next_in_dataflow_order(&mut afters); let after = next_in_dataflow_order(&mut afters);
let before = befores.as_mut().map(next_in_dataflow_order); let before = befores.as_mut().map(next_in_dataflow_order);
self.write_row(w, &index_str, &statement_str, |_this, w, fmt| { self.write_row(w, &index_str, &statement_str, |_this, w, fmt| {
if let Some(before) = before { if let Some(before) = before {
write!(w, r#"<td {fmt} align="left">{diff}</td>"#, fmt = fmt, diff = before)?; write!(w, r#"<td {fmt} align="left">{before}</td>"#)?;
} }
write!(w, r#"<td {fmt} align="left">{diff}</td>"#, fmt = fmt, diff = after) write!(w, r#"<td {fmt} align="left">{after}</td>"#)
})?; })?;
} }
@ -421,10 +421,10 @@ where
self.write_row(w, "T", &terminator_str, |_this, w, fmt| { self.write_row(w, "T", &terminator_str, |_this, w, fmt| {
if let Some(before) = before { if let Some(before) = before {
write!(w, r#"<td {fmt} align="left">{diff}</td>"#, fmt = fmt, diff = before)?; write!(w, r#"<td {fmt} align="left">{before}</td>"#)?;
} }
write!(w, r#"<td {fmt} align="left">{diff}</td>"#, fmt = fmt, diff = after) write!(w, r#"<td {fmt} align="left">{after}</td>"#)
}) })
} }

View file

@ -129,13 +129,13 @@ impl<'tcx> fmt::Debug for MovePath<'tcx> {
fn fmt(&self, w: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, w: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(w, "MovePath {{")?; write!(w, "MovePath {{")?;
if let Some(parent) = self.parent { if let Some(parent) = self.parent {
write!(w, " parent: {:?},", parent)?; write!(w, " parent: {parent:?},")?;
} }
if let Some(first_child) = self.first_child { if let Some(first_child) = self.first_child {
write!(w, " first_child: {:?},", first_child)?; write!(w, " first_child: {first_child:?},")?;
} }
if let Some(next_sibling) = self.next_sibling { if let Some(next_sibling) = self.next_sibling {
write!(w, " next_sibling: {:?}", next_sibling)?; write!(w, " next_sibling: {next_sibling:?}")?;
} }
write!(w, " place: {:?} }}", self.place) write!(w, " place: {:?} }}", self.place)
} }
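
The `MovePath` Debug impl above mixes literal braces with captured identifiers. As a hedged standalone sketch (plain std, made-up values), doubled braces stay literal while `{parent:?}` and `{place:?}` capture the locals with a Debug specifier:

    fn main() {
        let parent = 3usize;
        let place = "x.0";
        // `{{` and `}}` print literal braces; the captured identifiers keep
        // their `:?` format spec.
        let s = format!("MovePath {{ parent: {parent:?}, place: {place:?} }}");
        assert_eq!(s, r#"MovePath { parent: 3, place: "x.0" }"#);
    }
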

View file

@ -920,7 +920,7 @@ fn debug_with_context<V: Debug + Eq>(
) -> std::fmt::Result { ) -> std::fmt::Result {
for (local, place) in map.locals.iter_enumerated() { for (local, place) in map.locals.iter_enumerated() {
if let Some(place) = place { if let Some(place) = place {
debug_with_context_rec(*place, &format!("{:?}", local), new, old, map, f)?; debug_with_context_rec(*place, &format!("{local:?}"), new, old, map, f)?;
} }
} }
Ok(()) Ok(())

View file

@ -526,7 +526,7 @@ fn collect_items_rec<'tcx>(
let formatted_item = with_no_trimmed_paths!(starting_point.node.to_string()); let formatted_item = with_no_trimmed_paths!(starting_point.node.to_string());
tcx.sess.span_note_without_error( tcx.sess.span_note_without_error(
starting_point.span, starting_point.span,
&format!("the above error was encountered while instantiating `{}`", formatted_item), &format!("the above error was encountered while instantiating `{formatted_item}`"),
); );
} }
inlining_map.lock_mut().record_accesses(starting_point.node, &neighbors.items); inlining_map.lock_mut().record_accesses(starting_point.node, &neighbors.items);

View file

@ -50,7 +50,7 @@ impl IntoDiagnostic<'_> for UnusedGenericParams {
// FIXME: I can figure out how to do a label with a fluent string with a fixed message, // FIXME: I can figure out how to do a label with a fluent string with a fixed message,
// or a label with a dynamic value in a hard-coded string, but I haven't figured out // or a label with a dynamic value in a hard-coded string, but I haven't figured out
// how to combine the two. 😢 // how to combine the two. 😢
diag.span_label(span, format!("generic parameter `{}` is unused", name)); diag.span_label(span, format!("generic parameter `{name}` is unused"));
} }
diag diag
} }

View file

@ -285,7 +285,7 @@ where
use std::fmt::Write; use std::fmt::Write;
let s = &mut String::new(); let s = &mut String::new();
let _ = writeln!(s, "{}", label); let _ = writeln!(s, "{label}");
for cgu in cgus { for cgu in cgus {
let _ = let _ =
writeln!(s, "CodegenUnit {} estimated size {} :", cgu.name(), cgu.size_estimate()); writeln!(s, "CodegenUnit {} estimated size {} :", cgu.name(), cgu.size_estimate());
@ -355,9 +355,8 @@ fn collect_and_partition_mono_items(tcx: TyCtxt<'_>, (): ()) -> (&DefIdSet, &[Co
} else { } else {
if mode_string != "lazy" { if mode_string != "lazy" {
let message = format!( let message = format!(
"Unknown codegen-item collection mode '{}'. \ "Unknown codegen-item collection mode '{mode_string}'. \
Falling back to 'lazy' mode.", Falling back to 'lazy' mode."
mode_string
); );
tcx.sess.warn(&message); tcx.sess.warn(&message);
} }
@ -470,7 +469,7 @@ fn collect_and_partition_mono_items(tcx: TyCtxt<'_>, (): ()) -> (&DefIdSet, &[Co
item_keys.sort(); item_keys.sort();
for item in item_keys { for item in item_keys {
println!("MONO_ITEM {}", item); println!("MONO_ITEM {item}");
} }
} }
@ -596,6 +595,6 @@ pub fn provide(providers: &mut Providers) {
let (_, all) = tcx.collect_and_partition_mono_items(()); let (_, all) = tcx.collect_and_partition_mono_items(());
all.iter() all.iter()
.find(|cgu| cgu.name() == name) .find(|cgu| cgu.name() == name)
.unwrap_or_else(|| panic!("failed to find cgu with name {:?}", name)) .unwrap_or_else(|| panic!("failed to find cgu with name {name:?}"))
}; };
} }
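
A side note on the CGU size-dump helper in the first hunk above, as a standalone sketch under the same assumptions: `write!`/`writeln!` into a `String` go through the `std::fmt::Write` trait, and captured identifiers work there exactly as in `println!`.

    use std::fmt::Write;

    fn main() {
        let label = "mono items";
        let count = 42usize;
        let mut s = String::new();
        // `writeln!`/`write!` on a String require `std::fmt::Write` in scope.
        let _ = writeln!(s, "{label}");
        let _ = write!(s, "count: {count}");
        assert_eq!(s, "mono items\ncount: 42");
    }
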

View file

@ -40,12 +40,12 @@ pub(crate) fn dump_closure_profile<'tcx>(tcx: TyCtxt<'tcx>, closure_instance: In
let new_size = tcx let new_size = tcx
.layout_of(param_env.and(after_feature_tys)) .layout_of(param_env.and(after_feature_tys))
.map(|l| format!("{:?}", l.size.bytes())) .map(|l| format!("{:?}", l.size.bytes()))
.unwrap_or_else(|e| format!("Failed {:?}", e)); .unwrap_or_else(|e| format!("Failed {e:?}"));
let old_size = tcx let old_size = tcx
.layout_of(param_env.and(before_feature_tys)) .layout_of(param_env.and(before_feature_tys))
.map(|l| format!("{:?}", l.size.bytes())) .map(|l| format!("{:?}", l.size.bytes()))
.unwrap_or_else(|e| format!("Failed {:?}", e)); .unwrap_or_else(|e| format!("Failed {e:?}"));
let closure_span = tcx.def_span(closure_def_id); let closure_span = tcx.def_span(closure_def_id);
let src_file = tcx.sess.source_map().span_to_filename(closure_span); let src_file = tcx.sess.source_map().span_to_filename(closure_span);
@ -54,7 +54,7 @@ pub(crate) fn dump_closure_profile<'tcx>(tcx: TyCtxt<'tcx>, closure_instance: In
.source_map() .source_map()
.span_to_lines(closure_span) .span_to_lines(closure_span)
.map(|l| format!("{:?} {:?}", l.lines.first(), l.lines.last())) .map(|l| format!("{:?} {:?}", l.lines.first(), l.lines.last()))
.unwrap_or_else(|e| format!("{:?}", e)); .unwrap_or_else(|e| format!("{e:?}"));
if let Err(e) = writeln!( if let Err(e) = writeln!(
file, file,
@ -64,7 +64,7 @@ pub(crate) fn dump_closure_profile<'tcx>(tcx: TyCtxt<'tcx>, closure_instance: In
src_file.prefer_local(), src_file.prefer_local(),
line_nos line_nos
) { ) {
eprintln!("Error writing to file {}", e) eprintln!("Error writing to file {e}")
} }
} }
} }

View file

@ -447,7 +447,7 @@ impl<'a> Parser<'a> {
Some(pos) Some(pos)
} else { } else {
let pos = self.to_span_index(pos); let pos = self.to_span_index(pos);
let description = format!("expected `'}}'`, found `{:?}`", maybe); let description = format!("expected `'}}'`, found `{maybe:?}`");
let label = "expected `}`".to_owned(); let label = "expected `}`".to_owned();
let (note, secondary_label) = if c == '}' { let (note, secondary_label) = if c == '}' {
( (
@ -471,12 +471,12 @@ impl<'a> Parser<'a> {
None None
} }
} else { } else {
let description = format!("expected `{:?}` but string was terminated", c); let description = format!("expected `{c:?}` but string was terminated");
// point at closing `"` // point at closing `"`
let pos = self.input.len() - if self.append_newline { 1 } else { 0 }; let pos = self.input.len() - if self.append_newline { 1 } else { 0 };
let pos = self.to_span_index(pos); let pos = self.to_span_index(pos);
if c == '}' { if c == '}' {
let label = format!("expected `{:?}`", c); let label = format!("expected `{c:?}`");
let (note, secondary_label) = if c == '}' { let (note, secondary_label) = if c == '}' {
( (
Some( Some(
@ -497,7 +497,7 @@ impl<'a> Parser<'a> {
should_be_replaced_with_positional_argument: false, should_be_replaced_with_positional_argument: false,
}); });
} else { } else {
self.err(description, format!("expected `{:?}`", c), pos.to(pos)); self.err(description, format!("expected `{c:?}`"), pos.to(pos));
} }
None None
} }

View file

@ -915,7 +915,7 @@ impl<'tcx, 'a> TestReachabilityVisitor<'tcx, 'a> {
if level != Level::Direct { if level != Level::Direct {
error_msg.push_str(", "); error_msg.push_str(", ");
} }
error_msg.push_str(&format!("{:?}: {}", level, vis_str)); error_msg.push_str(&format!("{level:?}: {vis_str}"));
} }
} else { } else {
error_msg.push_str("not in the table"); error_msg.push_str("not in the table");

View file

@ -787,7 +787,7 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for DefId {
// which means that the definition with this hash is guaranteed to // which means that the definition with this hash is guaranteed to
// still exist in the current compilation session. // still exist in the current compilation session.
d.tcx.def_path_hash_to_def_id(def_path_hash, &mut || { d.tcx.def_path_hash_to_def_id(def_path_hash, &mut || {
panic!("Failed to convert DefPathHash {:?}", def_path_hash) panic!("Failed to convert DefPathHash {def_path_hash:?}")
}) })
} }
} }

View file

@ -321,7 +321,7 @@ pub(crate) fn create_query_frame<
ty::print::with_forced_impl_filename_line!(do_describe(tcx.tcx, key)) ty::print::with_forced_impl_filename_line!(do_describe(tcx.tcx, key))
); );
let description = let description =
if tcx.sess.verbose() { format!("{} [{:?}]", description, name) } else { description }; if tcx.sess.verbose() { format!("{description} [{name:?}]") } else { description };
let span = if kind == dep_graph::DepKind::def_span { let span = if kind == dep_graph::DepKind::def_span {
// The `def_span` query is used to calculate `default_span`, // The `def_span` query is used to calculate `default_span`,
// so exit to avoid infinite recursion. // so exit to avoid infinite recursion.

View file

@ -111,7 +111,7 @@ impl<T: Debug> IntoSelfProfilingString for T {
&self, &self,
builder: &mut QueryKeyStringBuilder<'_, '_>, builder: &mut QueryKeyStringBuilder<'_, '_>,
) -> StringId { ) -> StringId {
let s = format!("{:?}", self); let s = format!("{self:?}");
builder.profiler.alloc_string(&s[..]) builder.profiler.alloc_string(&s[..])
} }
} }

View file

@ -29,7 +29,7 @@ impl DepNodeFilter {
/// Tests whether `node` meets the filter, returning true if so. /// Tests whether `node` meets the filter, returning true if so.
pub fn test<K: DepKind>(&self, node: &DepNode<K>) -> bool { pub fn test<K: DepKind>(&self, node: &DepNode<K>) -> bool {
let debug_str = format!("{:?}", node); let debug_str = format!("{node:?}");
self.text.split('&').map(|s| s.trim()).all(|f| debug_str.contains(f)) self.text.split('&').map(|s| s.trim()).all(|f| debug_str.contains(f))
} }
} }
@ -46,7 +46,7 @@ impl<K: DepKind> EdgeFilter<K> {
pub fn new(test: &str) -> Result<EdgeFilter<K>, Box<dyn Error>> { pub fn new(test: &str) -> Result<EdgeFilter<K>, Box<dyn Error>> {
let parts: Vec<_> = test.split("->").collect(); let parts: Vec<_> = test.split("->").collect();
if parts.len() != 2 { if parts.len() != 2 {
Err(format!("expected a filter like `a&b -> c&d`, not `{}`", test).into()) Err(format!("expected a filter like `a&b -> c&d`, not `{test}`").into())
} else { } else {
Ok(EdgeFilter { Ok(EdgeFilter {
source: DepNodeFilter::new(parts[0]), source: DepNodeFilter::new(parts[0]),

View file

@ -120,7 +120,7 @@ pub trait DepNodeParams<Tcx: DepContext>: fmt::Debug + Sized {
} }
fn to_debug_str(&self, _: Tcx) -> String { fn to_debug_str(&self, _: Tcx) -> String {
format!("{:?}", self) format!("{self:?}")
} }
/// This method tries to recover the query key from the given `DepNode`, /// This method tries to recover the query key from the given `DepNode`,

View file

@ -316,10 +316,8 @@ impl<K: DepKind> DepGraph<K> {
assert!( assert!(
!self.dep_node_exists(&key), !self.dep_node_exists(&key),
"forcing query with already existing `DepNode`\n\ "forcing query with already existing `DepNode`\n\
- query-key: {:?}\n\ - query-key: {arg:?}\n\
- dep-node: {:?}", - dep-node: {key:?}"
arg,
key
); );
let task_deps = if cx.dep_context().is_eval_always(key.kind) { let task_deps = if cx.dep_context().is_eval_always(key.kind) {
@ -365,8 +363,7 @@ impl<K: DepKind> DepGraph<K> {
debug_assert!( debug_assert!(
data.colors.get(prev_index).is_none(), data.colors.get(prev_index).is_none(),
"DepGraph::with_task() - Duplicate DepNodeColor \ "DepGraph::with_task() - Duplicate DepNodeColor \
insertion for {:?}", insertion for {key:?}"
key
); );
data.colors.insert(prev_index, color); data.colors.insert(prev_index, color);
@ -447,7 +444,7 @@ impl<K: DepKind> DepGraph<K> {
TaskDepsRef::Allow(deps) => deps.lock(), TaskDepsRef::Allow(deps) => deps.lock(),
TaskDepsRef::Ignore => return, TaskDepsRef::Ignore => return,
TaskDepsRef::Forbid => { TaskDepsRef::Forbid => {
panic!("Illegal read of: {:?}", dep_node_index) panic!("Illegal read of: {dep_node_index:?}")
} }
}; };
let task_deps = &mut *task_deps; let task_deps = &mut *task_deps;
@ -824,8 +821,7 @@ impl<K: DepKind> DepGraph<K> {
debug_assert!( debug_assert!(
data.colors.get(prev_dep_node_index).is_none(), data.colors.get(prev_dep_node_index).is_none(),
"DepGraph::try_mark_previous_green() - Duplicate DepNodeColor \ "DepGraph::try_mark_previous_green() - Duplicate DepNodeColor \
insertion for {:?}", insertion for {dep_node:?}"
dep_node
); );
if !side_effects.is_empty() { if !side_effects.is_empty() {
@ -1164,7 +1160,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
if let Some(fingerprint) = fingerprint { if let Some(fingerprint) = fingerprint {
if fingerprint == prev_graph.fingerprint_by_index(prev_index) { if fingerprint == prev_graph.fingerprint_by_index(prev_index) {
if print_status { if print_status {
eprintln!("[task::green] {:?}", key); eprintln!("[task::green] {key:?}");
} }
// This is a green node: it existed in the previous compilation, // This is a green node: it existed in the previous compilation,
@ -1186,7 +1182,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
(dep_node_index, Some((prev_index, DepNodeColor::Green(dep_node_index)))) (dep_node_index, Some((prev_index, DepNodeColor::Green(dep_node_index))))
} else { } else {
if print_status { if print_status {
eprintln!("[task::red] {:?}", key); eprintln!("[task::red] {key:?}");
} }
// This is a red node: it existed in the previous compilation, its query // This is a red node: it existed in the previous compilation, its query
@ -1209,7 +1205,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
} }
} else { } else {
if print_status { if print_status {
eprintln!("[task::unknown] {:?}", key); eprintln!("[task::unknown] {key:?}");
} }
// This is a red node, effectively: it existed in the previous compilation // This is a red node, effectively: it existed in the previous compilation
@ -1234,7 +1230,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
} }
} else { } else {
if print_status { if print_status {
eprintln!("[task::new] {:?}", key); eprintln!("[task::new] {key:?}");
} }
let fingerprint = fingerprint.unwrap_or(Fingerprint::ZERO); let fingerprint = fingerprint.unwrap_or(Fingerprint::ZERO);

View file

@ -270,17 +270,14 @@ impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
eprintln!("[incremental]"); eprintln!("[incremental]");
eprintln!("[incremental] DepGraph Statistics"); eprintln!("[incremental] DepGraph Statistics");
eprintln!("{}", SEPARATOR); eprintln!("{SEPARATOR}");
eprintln!("[incremental]"); eprintln!("[incremental]");
eprintln!("[incremental] Total Node Count: {}", status.total_node_count); eprintln!("[incremental] Total Node Count: {}", status.total_node_count);
eprintln!("[incremental] Total Edge Count: {}", status.total_edge_count); eprintln!("[incremental] Total Edge Count: {}", status.total_edge_count);
if cfg!(debug_assertions) { if cfg!(debug_assertions) {
eprintln!("[incremental] Total Edge Reads: {}", total_read_count); eprintln!("[incremental] Total Edge Reads: {total_read_count}");
eprintln!( eprintln!("[incremental] Total Duplicate Edge Reads: {total_duplicate_read_count}");
"[incremental] Total Duplicate Edge Reads: {}",
total_duplicate_read_count
);
} }
eprintln!("[incremental]"); eprintln!("[incremental]");
@ -288,7 +285,7 @@ impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
"[incremental] {:<36}| {:<17}| {:<12}| {:<17}|", "[incremental] {:<36}| {:<17}| {:<12}| {:<17}|",
"Node Kind", "Node Frequency", "Node Count", "Avg. Edge Count" "Node Kind", "Node Frequency", "Node Count", "Avg. Edge Count"
); );
eprintln!("{}", SEPARATOR); eprintln!("{SEPARATOR}");
for stat in stats { for stat in stats {
let node_kind_ratio = let node_kind_ratio =
@ -304,7 +301,7 @@ impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
); );
} }
eprintln!("{}", SEPARATOR); eprintln!("{SEPARATOR}");
eprintln!("[incremental]"); eprintln!("[incremental]");
} }
} }

View file

@ -549,8 +549,7 @@ where
// can be forced from `DepNode`. // can be forced from `DepNode`.
debug_assert!( debug_assert!(
!qcx.dep_context().fingerprint_style(dep_node.kind).reconstructible(), !qcx.dep_context().fingerprint_style(dep_node.kind).reconstructible(),
"missing on-disk cache entry for {:?}", "missing on-disk cache entry for {dep_node:?}"
dep_node
); );
} }
@ -589,8 +588,7 @@ where
{ {
assert!( assert!(
tcx.dep_graph().is_green(dep_node), tcx.dep_graph().is_green(dep_node),
"fingerprint for green query instance not loaded from cache: {:?}", "fingerprint for green query instance not loaded from cache: {dep_node:?}",
dep_node,
); );
let new_hash = hash_result.map_or(Fingerprint::ZERO, |f| { let new_hash = hash_result.map_or(Fingerprint::ZERO, |f| {
@ -669,16 +667,16 @@ fn incremental_verify_ich_failed(sess: &Session, dep_node: DebugArg<'_>, result:
sess.emit_err(crate::error::Reentrant); sess.emit_err(crate::error::Reentrant);
} else { } else {
let run_cmd = if let Some(crate_name) = &sess.opts.crate_name { let run_cmd = if let Some(crate_name) = &sess.opts.crate_name {
format!("`cargo clean -p {}` or `cargo clean`", crate_name) format!("`cargo clean -p {crate_name}` or `cargo clean`")
} else { } else {
"`cargo clean`".to_string() "`cargo clean`".to_string()
}; };
sess.emit_err(crate::error::IncrementCompilation { sess.emit_err(crate::error::IncrementCompilation {
run_cmd, run_cmd,
dep_node: format!("{:?}", dep_node), dep_node: format!("{dep_node:?}"),
}); });
panic!("Found unstable fingerprints for {:?}: {:?}", dep_node, result); panic!("Found unstable fingerprints for {dep_node:?}: {result:?}");
} }
INSIDE_VERIFY_PANIC.with(|in_panic| in_panic.set(old_in_panic)); INSIDE_VERIFY_PANIC.with(|in_panic| in_panic.set(old_in_panic));

View file

@ -303,7 +303,7 @@ impl DefId {
// i.e. don't use closures. // i.e. don't use closures.
match self.as_local() { match self.as_local() {
Some(local_def_id) => local_def_id, Some(local_def_id) => local_def_id,
None => panic!("DefId::expect_local: `{:?}` isn't local", self), None => panic!("DefId::expect_local: `{self:?}` isn't local"),
} }
} }

View file

@ -44,7 +44,7 @@ impl fmt::Display for Edition {
Edition::Edition2021 => "2021", Edition::Edition2021 => "2021",
Edition::Edition2024 => "2024", Edition::Edition2024 => "2024",
}; };
write!(f, "{}", s) write!(f, "{s}")
} }
} }

View file

@ -110,7 +110,7 @@ fn assert_default_hashing_controls<CTX: HashStableContext>(ctx: &CTX, msg: &str)
// Such configuration must not be used for metadata. // Such configuration must not be used for metadata.
HashingControls { hash_spans } HashingControls { hash_spans }
if hash_spans == !ctx.unstable_opts_incremental_ignore_spans() => {} if hash_spans == !ctx.unstable_opts_incremental_ignore_spans() => {}
other => panic!("Attempted hashing of {msg} with non-default HashingControls: {:?}", other), other => panic!("Attempted hashing of {msg} with non-default HashingControls: {other:?}"),
} }
} }
@ -629,7 +629,7 @@ pub fn update_dollar_crate_names(mut get_name: impl FnMut(SyntaxContext) -> Symb
pub fn debug_hygiene_data(verbose: bool) -> String { pub fn debug_hygiene_data(verbose: bool) -> String {
HygieneData::with(|data| { HygieneData::with(|data| {
if verbose { if verbose {
format!("{:#?}", data) format!("{data:#?}")
} else { } else {
let mut s = String::from("Expansions:"); let mut s = String::from("Expansions:");
let mut debug_expn_data = |(id, expn_data): (&ExpnId, &ExpnData)| { let mut debug_expn_data = |(id, expn_data): (&ExpnId, &ExpnData)| {
@ -1067,9 +1067,9 @@ impl ExpnKind {
match *self { match *self {
ExpnKind::Root => kw::PathRoot.to_string(), ExpnKind::Root => kw::PathRoot.to_string(),
ExpnKind::Macro(macro_kind, name) => match macro_kind { ExpnKind::Macro(macro_kind, name) => match macro_kind {
MacroKind::Bang => format!("{}!", name), MacroKind::Bang => format!("{name}!"),
MacroKind::Attr => format!("#[{}]", name), MacroKind::Attr => format!("#[{name}]"),
MacroKind::Derive => format!("#[derive({})]", name), MacroKind::Derive => format!("#[derive({name})]"),
}, },
ExpnKind::AstPass(kind) => kind.descr().to_string(), ExpnKind::AstPass(kind) => kind.descr().to_string(),
ExpnKind::Desugaring(kind) => format!("desugaring of {}", kind.descr()), ExpnKind::Desugaring(kind) => format!("desugaring of {}", kind.descr()),
@ -1466,11 +1466,7 @@ impl<D: Decoder> Decodable<D> for SyntaxContext {
/// collisions are only possible between `ExpnId`s within the same crate. /// collisions are only possible between `ExpnId`s within the same crate.
fn update_disambiguator(expn_data: &mut ExpnData, mut ctx: impl HashStableContext) -> ExpnHash { fn update_disambiguator(expn_data: &mut ExpnData, mut ctx: impl HashStableContext) -> ExpnHash {
// This disambiguator should not have been set yet. // This disambiguator should not have been set yet.
assert_eq!( assert_eq!(expn_data.disambiguator, 0, "Already set disambiguator for ExpnData: {expn_data:?}");
expn_data.disambiguator, 0,
"Already set disambiguator for ExpnData: {:?}",
expn_data
);
assert_default_hashing_controls(&ctx, "ExpnData (disambiguator)"); assert_default_hashing_controls(&ctx, "ExpnData (disambiguator)");
let mut expn_hash = expn_data.hash_expn(&mut ctx); let mut expn_hash = expn_data.hash_expn(&mut ctx);

View file

@ -329,7 +329,7 @@ impl fmt::Display for FileNameDisplay<'_> {
ProcMacroSourceCode(_) => write!(fmt, "<proc-macro source code>"), ProcMacroSourceCode(_) => write!(fmt, "<proc-macro source code>"),
CfgSpec(_) => write!(fmt, "<cfgspec>"), CfgSpec(_) => write!(fmt, "<cfgspec>"),
CliCrateAttr(_) => write!(fmt, "<crate attribute>"), CliCrateAttr(_) => write!(fmt, "<crate attribute>"),
Custom(ref s) => write!(fmt, "<{}>", s), Custom(ref s) => write!(fmt, "<{s}>"),
DocTest(ref path, _) => write!(fmt, "{}", path.display()), DocTest(ref path, _) => write!(fmt, "{}", path.display()),
InlineAsm(_) => write!(fmt, "<inline asm>"), InlineAsm(_) => write!(fmt, "<inline asm>"),
} }
@ -1074,7 +1074,7 @@ impl NonNarrowChar {
0 => NonNarrowChar::ZeroWidth(pos), 0 => NonNarrowChar::ZeroWidth(pos),
2 => NonNarrowChar::Wide(pos), 2 => NonNarrowChar::Wide(pos),
4 => NonNarrowChar::Tab(pos), 4 => NonNarrowChar::Tab(pos),
_ => panic!("width {} given for non-narrow character", width), _ => panic!("width {width} given for non-narrow character"),
} }
} }

View file

@ -27,7 +27,7 @@ impl SpannedEventArgRecorder for EventArgRecorder<'_> {
if let Some(source_map) = &*session_globals.source_map.borrow() { if let Some(source_map) = &*session_globals.source_map.borrow() {
source_map.span_to_embeddable_string(span) source_map.span_to_embeddable_string(span)
} else { } else {
format!("{:?}", span) format!("{span:?}")
} }
}); });
self.record_arg(span_arg); self.record_arg(span_arg);

View file

@ -175,7 +175,7 @@ impl SymbolPath {
fn finish(mut self, hash: u64) -> String { fn finish(mut self, hash: u64) -> String {
self.finalize_pending_component(); self.finalize_pending_component();
// E = end name-sequence // E = end name-sequence
let _ = write!(self.result, "17h{:016x}E", hash); let _ = write!(self.result, "17h{hash:016x}E");
self.result self.result
} }
} }
@ -227,7 +227,7 @@ impl<'tcx> Printer<'tcx> for &mut SymbolPrinter<'tcx> {
self = self.print_type(ty)?; self = self.print_type(ty)?;
self.write_str("; ")?; self.write_str("; ")?;
if let Some(size) = size.kind().try_to_bits(self.tcx().data_layout.pointer_size) { if let Some(size) = size.kind().try_to_bits(self.tcx().data_layout.pointer_size) {
write!(self, "{}", size)? write!(self, "{size}")?
} else if let ty::ConstKind::Param(param) = size.kind() { } else if let ty::ConstKind::Param(param) = size.kind() {
self = param.print(self)? self = param.print(self)?
} else { } else {

View file

@ -269,8 +269,7 @@ fn compute_symbol_name<'tcx>(
debug_assert!( debug_assert!(
rustc_demangle::try_demangle(&symbol).is_ok(), rustc_demangle::try_demangle(&symbol).is_ok(),
"compute_symbol_name: `{}` cannot be demangled", "compute_symbol_name: `{symbol}` cannot be demangled"
symbol
); );
symbol symbol
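
The `debug_assert!` above shows that the lint also covers the trailing format arguments of the `assert!` family and `panic!`. A small self-contained sketch (the symbol value is made up; the message only renders if the assertion fails):

    fn main() {
        let symbol = String::from("_RNvCs1234_7example3foo");
        // The message is an ordinary format string, so `{symbol}` is captured.
        assert!(
            symbol.starts_with("_R"),
            "compute_symbol_name: `{symbol}` cannot be demangled"
        );
        let dep_node = ("Hir", 7u32);
        debug_assert!(dep_node.1 > 0, "missing on-disk cache entry for {dep_node:?}");
        println!("assertions passed");
    }
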

View file

@ -74,7 +74,7 @@ impl SymbolNamesTest<'_> {
tcx.sess.emit_err(TestOutput { tcx.sess.emit_err(TestOutput {
span: attr.span, span: attr.span,
kind: Kind::DemanglingAlt, kind: Kind::DemanglingAlt,
content: format!("{:#}", demangling), content: format!("{demangling:#}"),
}); });
} }
} }

View file

@ -126,11 +126,11 @@ fn encode_const<'tcx>(
if value < zero { if value < zero {
s.push('n') s.push('n')
}; };
let _ = write!(s, "{}", value); let _ = write!(s, "{value}");
} }
fn push_unsigned_value<T: Display>(s: &mut String, value: T) { fn push_unsigned_value<T: Display>(s: &mut String, value: T) {
let _ = write!(s, "{}", value); let _ = write!(s, "{value}");
} }
if let Some(scalar_int) = c.kind().try_to_scalar_int() { if let Some(scalar_int) = c.kind().try_to_scalar_int() {

View file

@ -609,7 +609,7 @@ impl<'tcx> Printer<'tcx> for &mut SymbolMangler<'tcx> {
bits = val.unsigned_abs(); bits = val.unsigned_abs();
} }
let _ = write!(self.out, "{:x}_", bits); let _ = write!(self.out, "{bits:x}_");
} }
// FIXME(valtrees): Remove the special case for `str` // FIXME(valtrees): Remove the special case for `str`
@ -637,7 +637,7 @@ impl<'tcx> Printer<'tcx> for &mut SymbolMangler<'tcx> {
// FIXME(eddyb) use a specialized hex-encoding loop. // FIXME(eddyb) use a specialized hex-encoding loop.
for byte in s.bytes() { for byte in s.bytes() {
let _ = write!(self.out, "{:02x}", byte); let _ = write!(self.out, "{byte:02x}");
} }
self.push("_"); self.push("_");

View file

@ -177,12 +177,12 @@ impl Reg {
17..=32 => dl.i32_align.abi, 17..=32 => dl.i32_align.abi,
33..=64 => dl.i64_align.abi, 33..=64 => dl.i64_align.abi,
65..=128 => dl.i128_align.abi, 65..=128 => dl.i128_align.abi,
_ => panic!("unsupported integer: {:?}", self), _ => panic!("unsupported integer: {self:?}"),
}, },
RegKind::Float => match self.size.bits() { RegKind::Float => match self.size.bits() {
32 => dl.f32_align.abi, 32 => dl.f32_align.abi,
64 => dl.f64_align.abi, 64 => dl.f64_align.abi,
_ => panic!("unsupported float: {:?}", self), _ => panic!("unsupported float: {self:?}"),
}, },
RegKind::Vector => dl.vector_align(self.size).abi, RegKind::Vector => dl.vector_align(self.size).abi,
} }
@ -642,7 +642,7 @@ impl fmt::Display for AdjustForForeignAbiError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self { match self {
Self::Unsupported { arch, abi } => { Self::Unsupported { arch, abi } => {
write!(f, "target architecture {:?} does not support `extern {}` ABI", arch, abi) write!(f, "target architecture {arch:?} does not support `extern {abi}` ABI")
} }
} }
} }
@ -760,7 +760,7 @@ impl FromStr for Conv {
"AmdGpuKernel" => Ok(Conv::AmdGpuKernel), "AmdGpuKernel" => Ok(Conv::AmdGpuKernel),
"AvrInterrupt" => Ok(Conv::AvrInterrupt), "AvrInterrupt" => Ok(Conv::AvrInterrupt),
"AvrNonBlockingInterrupt" => Ok(Conv::AvrNonBlockingInterrupt), "AvrNonBlockingInterrupt" => Ok(Conv::AvrNonBlockingInterrupt),
_ => Err(format!("'{}' is not a valid value for entry function call convetion.", s)), _ => Err(format!("'{s}' is not a valid value for entry function call convetion.")),
} }
} }
} }

View file

@ -195,6 +195,6 @@ impl AArch64InlineAsmReg {
(modifier.unwrap_or('v'), self as u32 - Self::v0 as u32) (modifier.unwrap_or('v'), self as u32 - Self::v0 as u32)
}; };
assert!(index < 32); assert!(index < 32);
write!(out, "{}{}", prefix, index) write!(out, "{prefix}{index}")
} }
} }

View file

@ -249,7 +249,7 @@ impl ArmInlineAsmReg {
let index = self as u32 - Self::q0 as u32; let index = self as u32 - Self::q0 as u32;
assert!(index < 16); assert!(index < 16);
let index = index * 2 + (modifier == 'f') as u32; let index = index * 2 + (modifier == 'f') as u32;
write!(out, "d{}", index) write!(out, "d{index}")
} else { } else {
out.write_str(self.name()) out.write_str(self.name())
} }

View file

@ -679,13 +679,13 @@ impl fmt::Display for InlineAsmType {
Self::I128 => f.write_str("i128"), Self::I128 => f.write_str("i128"),
Self::F32 => f.write_str("f32"), Self::F32 => f.write_str("f32"),
Self::F64 => f.write_str("f64"), Self::F64 => f.write_str("f64"),
Self::VecI8(n) => write!(f, "i8x{}", n), Self::VecI8(n) => write!(f, "i8x{n}"),
Self::VecI16(n) => write!(f, "i16x{}", n), Self::VecI16(n) => write!(f, "i16x{n}"),
Self::VecI32(n) => write!(f, "i32x{}", n), Self::VecI32(n) => write!(f, "i32x{n}"),
Self::VecI64(n) => write!(f, "i64x{}", n), Self::VecI64(n) => write!(f, "i64x{n}"),
Self::VecI128(n) => write!(f, "i128x{}", n), Self::VecI128(n) => write!(f, "i128x{n}"),
Self::VecF32(n) => write!(f, "f32x{}", n), Self::VecF32(n) => write!(f, "f32x{n}"),
Self::VecF64(n) => write!(f, "f64x{}", n), Self::VecF64(n) => write!(f, "f64x{n}"),
} }
} }
} }

View file

@ -357,28 +357,28 @@ impl X86InlineAsmReg {
if self as u32 <= Self::dx as u32 { if self as u32 <= Self::dx as u32 {
let root = ['a', 'b', 'c', 'd'][self as usize - Self::ax as usize]; let root = ['a', 'b', 'c', 'd'][self as usize - Self::ax as usize];
match modifier.unwrap_or(reg_default_modifier) { match modifier.unwrap_or(reg_default_modifier) {
'l' => write!(out, "{}l", root), 'l' => write!(out, "{root}l"),
'h' => write!(out, "{}h", root), 'h' => write!(out, "{root}h"),
'x' => write!(out, "{}x", root), 'x' => write!(out, "{root}x"),
'e' => write!(out, "e{}x", root), 'e' => write!(out, "e{root}x"),
'r' => write!(out, "r{}x", root), 'r' => write!(out, "r{root}x"),
_ => unreachable!(), _ => unreachable!(),
} }
} else if self as u32 <= Self::di as u32 { } else if self as u32 <= Self::di as u32 {
let root = self.name(); let root = self.name();
match modifier.unwrap_or(reg_default_modifier) { match modifier.unwrap_or(reg_default_modifier) {
'l' => write!(out, "{}l", root), 'l' => write!(out, "{root}l"),
'x' => write!(out, "{}", root), 'x' => write!(out, "{root}"),
'e' => write!(out, "e{}", root), 'e' => write!(out, "e{root}"),
'r' => write!(out, "r{}", root), 'r' => write!(out, "r{root}"),
_ => unreachable!(), _ => unreachable!(),
} }
} else if self as u32 <= Self::r15 as u32 { } else if self as u32 <= Self::r15 as u32 {
let root = self.name(); let root = self.name();
match modifier.unwrap_or(reg_default_modifier) { match modifier.unwrap_or(reg_default_modifier) {
'l' => write!(out, "{}b", root), 'l' => write!(out, "{root}b"),
'x' => write!(out, "{}w", root), 'x' => write!(out, "{root}w"),
'e' => write!(out, "{}d", root), 'e' => write!(out, "{root}d"),
'r' => out.write_str(root), 'r' => out.write_str(root),
_ => unreachable!(), _ => unreachable!(),
} }
@ -387,15 +387,15 @@ impl X86InlineAsmReg {
} else if self as u32 <= Self::xmm15 as u32 { } else if self as u32 <= Self::xmm15 as u32 {
let prefix = modifier.unwrap_or('x'); let prefix = modifier.unwrap_or('x');
let index = self as u32 - Self::xmm0 as u32; let index = self as u32 - Self::xmm0 as u32;
write!(out, "{}{}", prefix, index) write!(out, "{prefix}{index}")
} else if self as u32 <= Self::ymm15 as u32 { } else if self as u32 <= Self::ymm15 as u32 {
let prefix = modifier.unwrap_or('y'); let prefix = modifier.unwrap_or('y');
let index = self as u32 - Self::ymm0 as u32; let index = self as u32 - Self::ymm0 as u32;
write!(out, "{}{}", prefix, index) write!(out, "{prefix}{index}")
} else if self as u32 <= Self::zmm31 as u32 { } else if self as u32 <= Self::zmm31 as u32 {
let prefix = modifier.unwrap_or('z'); let prefix = modifier.unwrap_or('z');
let index = self as u32 - Self::zmm0 as u32; let index = self as u32 - Self::zmm0 as u32;
write!(out, "{}{}", prefix, index) write!(out, "{prefix}{index}")
} else { } else {
out.write_str(self.name()) out.write_str(self.name())
} }

View file

@ -76,7 +76,7 @@ impl Arch {
fn pre_link_args(os: &'static str, arch: Arch, abi: &'static str) -> LinkArgs { fn pre_link_args(os: &'static str, arch: Arch, abi: &'static str) -> LinkArgs {
let platform_name: StaticCow<str> = match abi { let platform_name: StaticCow<str> = match abi {
"sim" => format!("{}-simulator", os).into(), "sim" => format!("{os}-simulator").into(),
"macabi" => "mac-catalyst".into(), "macabi" => "mac-catalyst".into(),
_ => os.into(), _ => os.into(),
}; };
@ -193,7 +193,7 @@ fn macos_deployment_target(arch: Arch) -> (u32, u32) {
fn macos_lld_platform_version(arch: Arch) -> String { fn macos_lld_platform_version(arch: Arch) -> String {
let (major, minor) = macos_deployment_target(arch); let (major, minor) = macos_deployment_target(arch);
format!("{}.{}", major, minor) format!("{major}.{minor}")
} }
pub fn macos_llvm_target(arch: Arch) -> String { pub fn macos_llvm_target(arch: Arch) -> String {
@ -252,7 +252,7 @@ pub fn ios_llvm_target(arch: Arch) -> String {
fn ios_lld_platform_version() -> String { fn ios_lld_platform_version() -> String {
let (major, minor) = ios_deployment_target(); let (major, minor) = ios_deployment_target();
format!("{}.{}", major, minor) format!("{major}.{minor}")
} }
pub fn ios_sim_llvm_target(arch: Arch) -> String { pub fn ios_sim_llvm_target(arch: Arch) -> String {
@ -266,7 +266,7 @@ fn tvos_deployment_target() -> (u32, u32) {
fn tvos_lld_platform_version() -> String { fn tvos_lld_platform_version() -> String {
let (major, minor) = tvos_deployment_target(); let (major, minor) = tvos_deployment_target();
format!("{}.{}", major, minor) format!("{major}.{minor}")
} }
fn watchos_deployment_target() -> (u32, u32) { fn watchos_deployment_target() -> (u32, u32) {
@ -275,7 +275,7 @@ fn watchos_deployment_target() -> (u32, u32) {
fn watchos_lld_platform_version() -> String { fn watchos_lld_platform_version() -> String {
let (major, minor) = watchos_deployment_target(); let (major, minor) = watchos_deployment_target();
format!("{}.{}", major, minor) format!("{major}.{minor}")
} }
pub fn watchos_sim_llvm_target(arch: Arch) -> String { pub fn watchos_sim_llvm_target(arch: Arch) -> String {

View file

@ -840,7 +840,7 @@ impl fmt::Display for SanitizerSet {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut first = true; let mut first = true;
for s in *self { for s in *self {
let name = s.as_str().unwrap_or_else(|| panic!("unrecognized sanitizer {:?}", s)); let name = s.as_str().unwrap_or_else(|| panic!("unrecognized sanitizer {s:?}"));
if !first { if !first {
f.write_str(", ")?; f.write_str(", ")?;
} }
@ -2074,7 +2074,7 @@ impl Target {
let mut get_req_field = |name: &str| { let mut get_req_field = |name: &str| {
obj.remove(name) obj.remove(name)
.and_then(|j| j.as_str().map(str::to_string)) .and_then(|j| j.as_str().map(str::to_string))
.ok_or_else(|| format!("Field {} in target specification is required", name)) .ok_or_else(|| format!("Field {name} in target specification is required"))
}; };
let mut base = Target { let mut base = Target {
@ -2480,7 +2480,7 @@ impl Target {
if let Some(s) = fp.as_str() { if let Some(s) = fp.as_str() {
base.frame_pointer = s base.frame_pointer = s
.parse() .parse()
.map_err(|()| format!("'{}' is not a valid value for frame-pointer", s))?; .map_err(|()| format!("'{s}' is not a valid value for frame-pointer"))?;
} else { } else {
incorrect_type.push("frame-pointer".into()) incorrect_type.push("frame-pointer".into())
} }
@ -2672,7 +2672,7 @@ impl Target {
return load_file(&p); return load_file(&p);
} }
Err(format!("Could not find specification for target {:?}", target_triple)) Err(format!("Could not find specification for target {target_triple:?}"))
} }
TargetTriple::TargetJson { ref contents, .. } => { TargetTriple::TargetJson { ref contents, .. } => {
let obj = serde_json::from_str(contents).map_err(|e| e.to_string())?; let obj = serde_json::from_str(contents).map_err(|e| e.to_string())?;
@ -2936,7 +2936,7 @@ impl TargetTriple {
let contents = std::fs::read_to_string(&canonicalized_path).map_err(|err| { let contents = std::fs::read_to_string(&canonicalized_path).map_err(|err| {
io::Error::new( io::Error::new(
io::ErrorKind::InvalidInput, io::ErrorKind::InvalidInput,
format!("target path {:?} is not a valid file: {}", canonicalized_path, err), format!("target path {canonicalized_path:?} is not a valid file: {err}"),
) )
})?; })?;
let triple = canonicalized_path let triple = canonicalized_path
@ -2971,7 +2971,7 @@ impl TargetTriple {
let mut hasher = DefaultHasher::new(); let mut hasher = DefaultHasher::new();
content.hash(&mut hasher); content.hash(&mut hasher);
let hash = hasher.finish(); let hash = hasher.finish();
format!("{}-{}", triple, hash) format!("{triple}-{hash}")
} }
} }
} }

View file

@ -3,7 +3,7 @@ use crate::spec::TargetOptions;
pub fn opts(kernel: &str) -> TargetOptions { pub fn opts(kernel: &str) -> TargetOptions {
TargetOptions { TargetOptions {
os: format!("solid_{}", kernel).into(), os: format!("solid_{kernel}").into(),
vendor: "kmc".into(), vendor: "kmc".into(),
executables: false, executables: false,
frame_pointer: FramePointer::NonLeaf, frame_pointer: FramePointer::NonLeaf,

View file

@ -189,7 +189,7 @@ fn dtorck_constraint_for_ty<'tcx>(
tcx.sess.delay_span_bug( tcx.sess.delay_span_bug(
span, span,
&format!("upvar_tys for closure not found. Expected capture information for closure {}", ty,), &format!("upvar_tys for closure not found. Expected capture information for closure {ty}",),
); );
return Err(NoSolution); return Err(NoSolution);
} }
@ -231,7 +231,7 @@ fn dtorck_constraint_for_ty<'tcx>(
// be fully resolved. // be fully resolved.
tcx.sess.delay_span_bug( tcx.sess.delay_span_bug(
span, span,
&format!("upvar_tys for generator not found. Expected capture information for generator {}", ty,), &format!("upvar_tys for generator not found. Expected capture information for generator {ty}",),
); );
return Err(NoSolution); return Err(NoSolution);
} }

View file

@ -47,7 +47,7 @@ fn try_normalize_after_erasing_regions<'tcx, T: TypeFoldable<'tcx> + PartialEq +
// us a test case. // us a test case.
debug_assert_eq!(normalized_value, resolved_value); debug_assert_eq!(normalized_value, resolved_value);
let erased = infcx.tcx.erase_regions(resolved_value); let erased = infcx.tcx.erase_regions(resolved_value);
debug_assert!(!erased.needs_infer(), "{:?}", erased); debug_assert!(!erased.needs_infer(), "{erased:?}");
Ok(erased) Ok(erased)
} }
Err(NoSolution) => Err(NoSolution), Err(NoSolution) => Err(NoSolution),

View file

@ -24,7 +24,7 @@ impl fmt::Debug for Byte {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match &self { match &self {
Self::Uninit => f.write_str("??u8"), Self::Uninit => f.write_str("??u8"),
Self::Init(b) => write!(f, "{:#04x}u8", b), Self::Init(b) => write!(f, "{b:#04x}u8"),
} }
} }
} }
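
Several rewrites keep a full format spec next to the captured name (e.g. `{hash:016x}` in the symbol mangler earlier and `{b:#04x}` just above). As a standalone sketch with invented values, the spec after the `:` behaves exactly as it does with positional arguments:

    fn main() {
        let hash: u64 = 0xdead_beef;
        let b: u8 = 0x0f;
        let key = ("DepNode", 42);

        // Width/fill, alternate hex, and Debug specs all combine with capture.
        assert_eq!(format!("17h{hash:016x}E"), format!("17h{:016x}E", hash));
        assert_eq!(format!("{b:#04x}u8"), format!("{:#04x}u8", b));
        assert_eq!(format!("{key:?}"), format!("{:?}", key));
        println!("equivalent");
    }
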

View file

@ -718,9 +718,9 @@ impl fmt::Debug for InferTy {
TyVar(ref v) => v.fmt(f), TyVar(ref v) => v.fmt(f),
IntVar(ref v) => v.fmt(f), IntVar(ref v) => v.fmt(f),
FloatVar(ref v) => v.fmt(f), FloatVar(ref v) => v.fmt(f),
FreshTy(v) => write!(f, "FreshTy({:?})", v), FreshTy(v) => write!(f, "FreshTy({v:?})"),
FreshIntTy(v) => write!(f, "FreshIntTy({:?})", v), FreshIntTy(v) => write!(f, "FreshIntTy({v:?})"),
FreshFloatTy(v) => write!(f, "FreshFloatTy({:?})", v), FreshFloatTy(v) => write!(f, "FreshFloatTy({v:?})"),
} }
} }
} }
@ -743,9 +743,9 @@ impl fmt::Display for InferTy {
TyVar(_) => write!(f, "_"), TyVar(_) => write!(f, "_"),
IntVar(_) => write!(f, "{}", "{integer}"), IntVar(_) => write!(f, "{}", "{integer}"),
FloatVar(_) => write!(f, "{}", "{float}"), FloatVar(_) => write!(f, "{}", "{float}"),
FreshTy(v) => write!(f, "FreshTy({})", v), FreshTy(v) => write!(f, "FreshTy({v})"),
FreshIntTy(v) => write!(f, "FreshIntTy({})", v), FreshIntTy(v) => write!(f, "FreshIntTy({v})"),
FreshFloatTy(v) => write!(f, "FreshFloatTy({})", v), FreshFloatTy(v) => write!(f, "FreshFloatTy({v})"),
} }
} }
} }

View file

@ -1028,10 +1028,10 @@ impl<I: Interner> hash::Hash for RegionKind<I> {
impl<I: Interner> fmt::Debug for RegionKind<I> { impl<I: Interner> fmt::Debug for RegionKind<I> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self { match self {
ReEarlyBound(data) => write!(f, "ReEarlyBound({:?})", data), ReEarlyBound(data) => write!(f, "ReEarlyBound({data:?})"),
ReLateBound(binder_id, bound_region) => { ReLateBound(binder_id, bound_region) => {
write!(f, "ReLateBound({:?}, {:?})", binder_id, bound_region) write!(f, "ReLateBound({binder_id:?}, {bound_region:?})")
} }
ReFree(fr) => fr.fmt(f), ReFree(fr) => fr.fmt(f),
@ -1040,7 +1040,7 @@ impl<I: Interner> fmt::Debug for RegionKind<I> {
ReVar(vid) => vid.fmt(f), ReVar(vid) => vid.fmt(f),
RePlaceholder(placeholder) => write!(f, "RePlaceholder({:?})", placeholder), RePlaceholder(placeholder) => write!(f, "RePlaceholder({placeholder:?})"),
ReErased => f.write_str("ReErased"), ReErased => f.write_str("ReErased"),
} }