Fix more clippy warnings
Fixes more of: clippy::unused_unit clippy::op_ref clippy::useless_format clippy::needless_return clippy::useless_conversion clippy::bind_instead_of_map clippy::into_iter_on_ref clippy::redundant_clone clippy::nonminimal_bool clippy::redundant_closure clippy::option_as_ref_deref clippy::len_zero clippy::iter_cloned_collect clippy::filter_next
This commit is contained in:
parent
feb3536eba
commit
58023fedfc
18 changed files with 25 additions and 34 deletions
|
@ -392,7 +392,7 @@ impl TokenStream {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
token_trees = out.into_iter().map(|t| TokenTree::Token(t)).collect();
|
token_trees = out.into_iter().map(TokenTree::Token).collect();
|
||||||
if token_trees.len() != 1 {
|
if token_trees.len() != 1 {
|
||||||
debug!("break_tokens: broke {:?} to {:?}", tree, token_trees);
|
debug!("break_tokens: broke {:?} to {:?}", tree, token_trees);
|
||||||
}
|
}
|
||||||
|
|
|
@ -1237,10 +1237,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||||
) => {
|
) => {
|
||||||
assert!(!*late);
|
assert!(!*late);
|
||||||
let out_op_sp = if input { op_sp2 } else { op_sp };
|
let out_op_sp = if input { op_sp2 } else { op_sp };
|
||||||
let msg = &format!(
|
let msg = "use `lateout` instead of \
|
||||||
"use `lateout` instead of \
|
`out` to avoid conflict";
|
||||||
`out` to avoid conflict"
|
|
||||||
);
|
|
||||||
err.span_help(out_op_sp, msg);
|
err.span_help(out_op_sp, msg);
|
||||||
}
|
}
|
||||||
_ => {}
|
_ => {}
|
||||||
|
|
|
@ -457,7 +457,7 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, sp: Span, args: AsmArgs) -> P<ast
|
||||||
|
|
||||||
let mut chars = arg.format.ty.chars();
|
let mut chars = arg.format.ty.chars();
|
||||||
let mut modifier = chars.next();
|
let mut modifier = chars.next();
|
||||||
if !chars.next().is_none() {
|
if chars.next().is_some() {
|
||||||
let span = arg
|
let span = arg
|
||||||
.format
|
.format
|
||||||
.ty_span
|
.ty_span
|
||||||
|
|
|
@ -63,7 +63,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
||||||
.tcx()
|
.tcx()
|
||||||
.destructure_const(ty::ParamEnv::reveal_all().and(&c))
|
.destructure_const(ty::ParamEnv::reveal_all().and(&c))
|
||||||
.fields
|
.fields
|
||||||
.into_iter()
|
.iter()
|
||||||
.map(|field| {
|
.map(|field| {
|
||||||
if let Some(prim) = field.val.try_to_scalar() {
|
if let Some(prim) = field.val.try_to_scalar() {
|
||||||
let layout = bx.layout_of(field_ty);
|
let layout = bx.layout_of(field_ty);
|
||||||
|
|
|
@ -159,14 +159,10 @@ impl AnnotateSnippetEmitterWriter {
|
||||||
// FIXME(#59346): Not really sure when `fold` should be true or false
|
// FIXME(#59346): Not really sure when `fold` should be true or false
|
||||||
fold: false,
|
fold: false,
|
||||||
annotations: annotations
|
annotations: annotations
|
||||||
.into_iter()
|
.iter()
|
||||||
.map(|annotation| SourceAnnotation {
|
.map(|annotation| SourceAnnotation {
|
||||||
range: (annotation.start_col, annotation.end_col),
|
range: (annotation.start_col, annotation.end_col),
|
||||||
label: annotation
|
label: annotation.label.as_deref().unwrap_or_default(),
|
||||||
.label
|
|
||||||
.as_ref()
|
|
||||||
.map(|s| s.as_str())
|
|
||||||
.unwrap_or_default(),
|
|
||||||
annotation_type: annotation_type_for_level(*level),
|
annotation_type: annotation_type_for_level(*level),
|
||||||
})
|
})
|
||||||
.collect(),
|
.collect(),
|
||||||
|
|
|
@ -550,7 +550,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
|
||||||
let error_code = error_code.into();
|
let error_code = error_code.into();
|
||||||
let mut err = self.tcx.sess.struct_span_err_with_code(
|
let mut err = self.tcx.sess.struct_span_err_with_code(
|
||||||
local_visitor.target_span,
|
local_visitor.target_span,
|
||||||
&format!("type annotations needed"),
|
"type annotations needed",
|
||||||
error_code,
|
error_code,
|
||||||
);
|
);
|
||||||
|
|
||||||
|
|
|
@ -77,8 +77,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
|
||||||
}
|
}
|
||||||
_ => {}
|
_ => {}
|
||||||
}
|
}
|
||||||
let mut type_param_span: MultiSpan =
|
let mut type_param_span: MultiSpan = visitor.types.to_vec().into();
|
||||||
visitor.types.iter().cloned().collect::<Vec<_>>().into();
|
|
||||||
for &span in &visitor.types {
|
for &span in &visitor.types {
|
||||||
type_param_span.push_span_label(
|
type_param_span.push_span_label(
|
||||||
span,
|
span,
|
||||||
|
|
|
@ -187,9 +187,9 @@ pub fn strip_shebang(input: &str) -> Option<usize> {
|
||||||
// Ok, this is a shebang but if the next non-whitespace token is `[` or maybe
|
// Ok, this is a shebang but if the next non-whitespace token is `[` or maybe
|
||||||
// a doc comment (due to `TokenKind::(Line,Block)Comment` ambiguity at lexer level),
|
// a doc comment (due to `TokenKind::(Line,Block)Comment` ambiguity at lexer level),
|
||||||
// then it may be valid Rust code, so consider it Rust code.
|
// then it may be valid Rust code, so consider it Rust code.
|
||||||
let next_non_whitespace_token = tokenize(input_tail).map(|tok| tok.kind).filter(|tok|
|
let next_non_whitespace_token = tokenize(input_tail).map(|tok| tok.kind).find(|tok|
|
||||||
!matches!(tok, TokenKind::Whitespace | TokenKind::LineComment | TokenKind::BlockComment { .. })
|
!matches!(tok, TokenKind::Whitespace | TokenKind::LineComment | TokenKind::BlockComment { .. })
|
||||||
).next();
|
);
|
||||||
if next_non_whitespace_token != Some(TokenKind::OpenBracket) {
|
if next_non_whitespace_token != Some(TokenKind::OpenBracket) {
|
||||||
// No other choice than to consider this a shebang.
|
// No other choice than to consider this a shebang.
|
||||||
return Some(2 + first_line_tail.len());
|
return Some(2 + first_line_tail.len());
|
||||||
|
|
|
@ -309,9 +309,7 @@ pub fn const_eval_raw_provider<'tcx>(
|
||||||
|
|
||||||
let res = ecx.load_mir(cid.instance.def, cid.promoted);
|
let res = ecx.load_mir(cid.instance.def, cid.promoted);
|
||||||
res.and_then(|body| eval_body_using_ecx(&mut ecx, cid, &body))
|
res.and_then(|body| eval_body_using_ecx(&mut ecx, cid, &body))
|
||||||
.and_then(|place| {
|
.map(|place| RawConst { alloc_id: place.ptr.assert_ptr().alloc_id, ty: place.layout.ty })
|
||||||
Ok(RawConst { alloc_id: place.ptr.assert_ptr().alloc_id, ty: place.layout.ty })
|
|
||||||
})
|
|
||||||
.map_err(|error| {
|
.map_err(|error| {
|
||||||
let err = error_to_const_error(&ecx, error);
|
let err = error_to_const_error(&ecx, error);
|
||||||
// errors in statics are always emitted as fatal errors
|
// errors in statics are always emitted as fatal errors
|
||||||
|
|
|
@ -51,7 +51,7 @@ impl<'a, 'tcx> Visitor<'tcx> for PackedRefChecker<'a, 'tcx> {
|
||||||
lint_root,
|
lint_root,
|
||||||
source_info.span,
|
source_info.span,
|
||||||
|lint| {
|
|lint| {
|
||||||
lint.build(&format!("reference to packed field is unaligned",))
|
lint.build("reference to packed field is unaligned")
|
||||||
.note(
|
.note(
|
||||||
"fields of packed structs are not properly aligned, and creating \
|
"fields of packed structs are not properly aligned, and creating \
|
||||||
a misaligned reference is undefined behavior (even if that \
|
a misaligned reference is undefined behavior (even if that \
|
||||||
|
|
|
@ -111,7 +111,7 @@ fn local_eligible_for_nrvo(body: &mut mir::Body<'_>) -> Option<Local> {
|
||||||
copied_to_return_place = Some(returned_local);
|
copied_to_return_place = Some(returned_local);
|
||||||
}
|
}
|
||||||
|
|
||||||
return copied_to_return_place;
|
copied_to_return_place
|
||||||
}
|
}
|
||||||
|
|
||||||
fn find_local_assigned_to_return_place(
|
fn find_local_assigned_to_return_place(
|
||||||
|
@ -136,7 +136,7 @@ fn find_local_assigned_to_return_place(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return None;
|
None
|
||||||
}
|
}
|
||||||
|
|
||||||
// If this statement is an assignment of an unprojected local to the return place,
|
// If this statement is an assignment of an unprojected local to the return place,
|
||||||
|
|
|
@ -99,7 +99,7 @@ fn get_arm_identity_info<'a, 'tcx>(stmts: &'a [Statement<'tcx>]) -> Option<ArmId
|
||||||
fn try_eat<'a, 'tcx>(
|
fn try_eat<'a, 'tcx>(
|
||||||
stmt_iter: &mut StmtIter<'a, 'tcx>,
|
stmt_iter: &mut StmtIter<'a, 'tcx>,
|
||||||
test: impl Fn(&'a Statement<'tcx>) -> bool,
|
test: impl Fn(&'a Statement<'tcx>) -> bool,
|
||||||
mut action: impl FnMut(usize, &'a Statement<'tcx>) -> (),
|
mut action: impl FnMut(usize, &'a Statement<'tcx>),
|
||||||
) {
|
) {
|
||||||
while stmt_iter.peek().map(|(_, stmt)| test(stmt)).unwrap_or(false) {
|
while stmt_iter.peek().map(|(_, stmt)| test(stmt)).unwrap_or(false) {
|
||||||
let (idx, stmt) = stmt_iter.next().unwrap();
|
let (idx, stmt) = stmt_iter.next().unwrap();
|
||||||
|
@ -271,7 +271,7 @@ fn optimization_applies<'tcx>(
|
||||||
}
|
}
|
||||||
|
|
||||||
// Verify the assignment chain consists of the form b = a; c = b; d = c; etc...
|
// Verify the assignment chain consists of the form b = a; c = b; d = c; etc...
|
||||||
if opt_info.field_tmp_assignments.len() == 0 {
|
if opt_info.field_tmp_assignments.is_empty() {
|
||||||
trace!("NO: no assignments found");
|
trace!("NO: no assignments found");
|
||||||
}
|
}
|
||||||
let mut last_assigned_to = opt_info.field_tmp_assignments[0].1;
|
let mut last_assigned_to = opt_info.field_tmp_assignments[0].1;
|
||||||
|
|
|
@ -401,7 +401,7 @@ impl<'a> StringReader<'a> {
|
||||||
let content_end = suffix_start - BytePos(postfix_len);
|
let content_end = suffix_start - BytePos(postfix_len);
|
||||||
let id = self.symbol_from_to(content_start, content_end);
|
let id = self.symbol_from_to(content_start, content_end);
|
||||||
self.validate_literal_escape(mode, content_start, content_end);
|
self.validate_literal_escape(mode, content_start, content_end);
|
||||||
return (lit_kind, id);
|
(lit_kind, id)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn pos(&self) -> BytePos {
|
pub fn pos(&self) -> BytePos {
|
||||||
|
|
|
@ -936,7 +936,7 @@ impl<'a> Parser<'a> {
|
||||||
} else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) {
|
} else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) {
|
||||||
// The current token is in the same line as the prior token, not recoverable.
|
// The current token is in the same line as the prior token, not recoverable.
|
||||||
} else if [token::Comma, token::Colon].contains(&self.token.kind)
|
} else if [token::Comma, token::Colon].contains(&self.token.kind)
|
||||||
&& &self.prev_token.kind == &token::CloseDelim(token::Paren)
|
&& self.prev_token.kind == token::CloseDelim(token::Paren)
|
||||||
{
|
{
|
||||||
// Likely typo: The current token is on a new line and is expected to be
|
// Likely typo: The current token is on a new line and is expected to be
|
||||||
// `.`, `;`, `?`, or an operator after a close delimiter token.
|
// `.`, `;`, `?`, or an operator after a close delimiter token.
|
||||||
|
|
|
@ -193,7 +193,7 @@ impl TokenCursor {
|
||||||
tree,
|
tree,
|
||||||
self.stack.len()
|
self.stack.len()
|
||||||
);
|
);
|
||||||
collecting.buf.push(tree.clone().into())
|
collecting.buf.push(tree.clone())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -675,7 +675,7 @@ impl<'a> Parser<'a> {
|
||||||
// If this was a missing `@` in a binding pattern
|
// If this was a missing `@` in a binding pattern
|
||||||
// bail with a suggestion
|
// bail with a suggestion
|
||||||
// https://github.com/rust-lang/rust/issues/72373
|
// https://github.com/rust-lang/rust/issues/72373
|
||||||
if self.prev_token.is_ident() && &self.token.kind == &token::DotDot {
|
if self.prev_token.is_ident() && self.token.kind == token::DotDot {
|
||||||
let msg = format!(
|
let msg = format!(
|
||||||
"if you meant to bind the contents of \
|
"if you meant to bind the contents of \
|
||||||
the rest of the array pattern into `{}`, use `@`",
|
the rest of the array pattern into `{}`, use `@`",
|
||||||
|
@ -1193,7 +1193,7 @@ impl<'a> Parser<'a> {
|
||||||
let mut collected_tokens = if let Some(collecting) = self.token_cursor.collecting.take() {
|
let mut collected_tokens = if let Some(collecting) = self.token_cursor.collecting.take() {
|
||||||
collecting.buf
|
collecting.buf
|
||||||
} else {
|
} else {
|
||||||
let msg = format!("our vector went away?");
|
let msg = "our vector went away?";
|
||||||
debug!("collect_tokens: {}", msg);
|
debug!("collect_tokens: {}", msg);
|
||||||
self.sess.span_diagnostic.delay_span_bug(self.token.span, &msg);
|
self.sess.span_diagnostic.delay_span_bug(self.token.span, &msg);
|
||||||
// This can happen due to a bad interaction of two unrelated recovery mechanisms
|
// This can happen due to a bad interaction of two unrelated recovery mechanisms
|
||||||
|
|
|
@ -232,7 +232,7 @@ impl ExprVisitor<'tcx> {
|
||||||
// size).
|
// size).
|
||||||
if let Some((in_expr, Some(in_asm_ty))) = tied_input {
|
if let Some((in_expr, Some(in_asm_ty))) = tied_input {
|
||||||
if in_asm_ty != asm_ty {
|
if in_asm_ty != asm_ty {
|
||||||
let msg = &format!("incompatible types for asm inout argument");
|
let msg = "incompatible types for asm inout argument";
|
||||||
let mut err = self.tcx.sess.struct_span_err(vec![in_expr.span, expr.span], msg);
|
let mut err = self.tcx.sess.struct_span_err(vec![in_expr.span, expr.span], msg);
|
||||||
err.span_label(
|
err.span_label(
|
||||||
in_expr.span,
|
in_expr.span,
|
||||||
|
|
|
@ -126,7 +126,7 @@ impl<'a> SourceCollector<'a> {
|
||||||
&self.scx.themes,
|
&self.scx.themes,
|
||||||
);
|
);
|
||||||
self.scx.fs.write(&cur, v.as_bytes())?;
|
self.scx.fs.write(&cur, v.as_bytes())?;
|
||||||
self.scx.local_sources.insert(p.clone(), href);
|
self.scx.local_sources.insert(p, href);
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -451,7 +451,7 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
|
||||||
..
|
..
|
||||||
},
|
},
|
||||||
..
|
..
|
||||||
})) => segments.first().and_then(|seg| Some(seg.ident.to_string())),
|
})) => segments.first().map(|seg| seg.ident.to_string()),
|
||||||
Some(hir::Node::Item(hir::Item {
|
Some(hir::Node::Item(hir::Item {
|
||||||
ident, kind: hir::ItemKind::Enum(..), ..
|
ident, kind: hir::ItemKind::Enum(..), ..
|
||||||
}))
|
}))
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue