Fix more clippy warnings
Fixes more of: clippy::unused_unit, clippy::op_ref, clippy::useless_format, clippy::needless_return, clippy::useless_conversion, clippy::bind_instead_of_map, clippy::into_iter_on_ref, clippy::redundant_clone, clippy::nonminimal_bool, clippy::redundant_closure, clippy::option_as_ref_deref, clippy::len_zero, clippy::iter_cloned_collect, clippy::filter_next
parent feb3536eba
commit 58023fedfc

18 changed files with 25 additions and 34 deletions
@@ -392,7 +392,7 @@ impl TokenStream {
                         break;
                     }
                 }
-                token_trees = out.into_iter().map(|t| TokenTree::Token(t)).collect();
+                token_trees = out.into_iter().map(TokenTree::Token).collect();
                 if token_trees.len() != 1 {
                     debug!("break_tokens: broke {:?} to {:?}", tree, token_trees);
                 }
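Note: the hunk above is clippy::redundant_closure — a closure that only forwards its argument can be replaced by the function (here, a tuple-variant constructor) itself. A minimal standalone sketch with invented names, not rustc's actual types:

    enum Tree {
        Token(u32),
    }

    fn collect_tokens(tokens: Vec<u32>) -> Vec<Tree> {
        // A tuple-variant constructor is itself a function, so it can be
        // passed to `map` directly instead of being wrapped in a closure.
        tokens.into_iter().map(Tree::Token).collect()
    }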
@@ -1237,10 +1237,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
                 ) => {
                     assert!(!*late);
                     let out_op_sp = if input { op_sp2 } else { op_sp };
-                    let msg = &format!(
-                        "use `lateout` instead of \
-                         `out` to avoid conflict"
-                    );
+                    let msg = "use `lateout` instead of \
+                               `out` to avoid conflict";
                     err.span_help(out_op_sp, msg);
                 }
                 _ => {}
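Note: this is clippy::useless_format — `format!` with no interpolation only allocates a `String` copy of a literal. A hedged sketch of the pattern (message text and function name invented):

    fn emit_help() {
        // Before: let msg = &format!("use `lateout` instead"); -- a needless
        // allocation; a plain &str literal serves anywhere only &str is needed.
        let msg = "use `lateout` instead";
        eprintln!("{}", msg);
    }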
@@ -457,7 +457,7 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, sp: Span, args: AsmArgs) -> P<ast
 
             let mut chars = arg.format.ty.chars();
             let mut modifier = chars.next();
-            if !chars.next().is_none() {
+            if chars.next().is_some() {
                 let span = arg
                     .format
                     .ty_span
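Note: `!chars.next().is_none()` is clippy::nonminimal_bool — a negated `is_none` is just `is_some`. Illustrative sketch (function name invented):

    fn has_second_char(s: &str) -> bool {
        let mut chars = s.chars();
        chars.next();
        // `!x.is_none()` reads as a double negation; `is_some()` says it directly.
        chars.next().is_some()
    }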
@@ -63,7 +63,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                 .tcx()
                 .destructure_const(ty::ParamEnv::reveal_all().and(&c))
                 .fields
-                .into_iter()
+                .iter()
                 .map(|field| {
                     if let Some(prim) = field.val.try_to_scalar() {
                         let layout = bx.layout_of(field_ty);
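Note: `.into_iter()` on a reference is clippy::into_iter_on_ref — on `&Vec<T>` or `&[T]` it cannot move anything and silently degrades to `iter()`, so the explicit spelling is clearer. Sketch under invented names:

    fn first_positive(fields: &[i64]) -> Option<&i64> {
        // On a slice reference, into_iter() and iter() yield the same
        // iterator over &i64; naming it iter() makes that visible.
        fields.iter().find(|f| **f > 0)
    }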
@@ -159,14 +159,10 @@ impl AnnotateSnippetEmitterWriter {
             // FIXME(#59346): Not really sure when `fold` should be true or false
             fold: false,
             annotations: annotations
-                .into_iter()
+                .iter()
                 .map(|annotation| SourceAnnotation {
                     range: (annotation.start_col, annotation.end_col),
-                    label: annotation
-                        .label
-                        .as_ref()
-                        .map(|s| s.as_str())
-                        .unwrap_or_default(),
+                    label: annotation.label.as_deref().unwrap_or_default(),
                     annotation_type: annotation_type_for_level(*level),
                 })
                 .collect(),
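Note: the five-line `label` chain collapses via clippy::option_as_ref_deref — `as_ref().map(|s| s.as_str())` is exactly `as_deref()`. A self-contained sketch (struct invented):

    struct Annotation {
        label: Option<String>,
    }

    fn label_text(a: &Annotation) -> &str {
        // as_deref() turns &Option<String> into Option<&str> in one step;
        // unwrap_or_default() then yields "" when there is no label.
        a.label.as_deref().unwrap_or_default()
    }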
@@ -550,7 +550,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
         let error_code = error_code.into();
         let mut err = self.tcx.sess.struct_span_err_with_code(
             local_visitor.target_span,
-            &format!("type annotations needed"),
+            "type annotations needed",
             error_code,
         );
 
@@ -77,8 +77,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
             }
             _ => {}
         }
-        let mut type_param_span: MultiSpan =
-            visitor.types.iter().cloned().collect::<Vec<_>>().into();
+        let mut type_param_span: MultiSpan = visitor.types.to_vec().into();
         for &span in &visitor.types {
             type_param_span.push_span_label(
                 span,
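Note: `iter().cloned().collect::<Vec<_>>()` is clippy::iter_cloned_collect — copying a slice into a `Vec` is what `to_vec()` does. Sketch with invented names:

    fn snapshot(spans: &[u32]) -> Vec<u32> {
        // to_vec() clones the slice's elements into a new Vec directly,
        // without spelling out the iterator pipeline.
        spans.to_vec()
    }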
@@ -187,9 +187,9 @@ pub fn strip_shebang(input: &str) -> Option<usize> {
         // Ok, this is a shebang but if the next non-whitespace token is `[` or maybe
         // a doc comment (due to `TokenKind::(Line,Block)Comment` ambiguity at lexer level),
         // then it may be valid Rust code, so consider it Rust code.
-        let next_non_whitespace_token = tokenize(input_tail).map(|tok| tok.kind).filter(|tok|
+        let next_non_whitespace_token = tokenize(input_tail).map(|tok| tok.kind).find(|tok|
             !matches!(tok, TokenKind::Whitespace | TokenKind::LineComment | TokenKind::BlockComment { .. })
-        ).next();
+        );
         if next_non_whitespace_token != Some(TokenKind::OpenBracket) {
             // No other choice than to consider this a shebang.
             return Some(2 + first_line_tail.len());
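Note: the `strip_shebang` hunk is clippy::filter_next — `filter(p).next()` builds a lazy adapter only to take one element, which `find(p)` expresses directly. Minimal sketch:

    fn first_even(nums: &[i32]) -> Option<&i32> {
        // find() stops at the first element matching the predicate and
        // names that intent, unlike filter(..).next().
        nums.iter().find(|n| **n % 2 == 0)
    }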
@@ -309,9 +309,7 @@ pub fn const_eval_raw_provider<'tcx>(
 
     let res = ecx.load_mir(cid.instance.def, cid.promoted);
     res.and_then(|body| eval_body_using_ecx(&mut ecx, cid, &body))
-        .and_then(|place| {
-            Ok(RawConst { alloc_id: place.ptr.assert_ptr().alloc_id, ty: place.layout.ty })
-        })
+        .map(|place| RawConst { alloc_id: place.ptr.assert_ptr().alloc_id, ty: place.layout.ty })
         .map_err(|error| {
             let err = error_to_const_error(&ecx, error);
             // errors in statics are always emitted as fatal errors
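Note: the `const_eval_raw_provider` hunk is clippy::bind_instead_of_map — `and_then(|x| Ok(..))` never produces a new error, so it is just `map`. Sketch over a plain `Result` (names invented):

    fn double_parse(s: &str) -> Result<usize, std::num::ParseIntError> {
        // and_then(|n| Ok(n * 2)) would wrap and immediately re-wrap;
        // map() applies the infallible step without the ceremony.
        s.parse::<usize>().map(|n| n * 2)
    }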
@@ -51,7 +51,7 @@ impl<'a, 'tcx> Visitor<'tcx> for PackedRefChecker<'a, 'tcx> {
                 lint_root,
                 source_info.span,
                 |lint| {
-                    lint.build(&format!("reference to packed field is unaligned",))
+                    lint.build("reference to packed field is unaligned")
                         .note(
                             "fields of packed structs are not properly aligned, and creating \
                              a misaligned reference is undefined behavior (even if that \
@@ -111,7 +111,7 @@ fn local_eligible_for_nrvo(body: &mut mir::Body<'_>) -> Option<Local> {
         copied_to_return_place = Some(returned_local);
     }
 
-    return copied_to_return_place;
+    copied_to_return_place
 }
 
 fn find_local_assigned_to_return_place(
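Note: both NRVO hunks are clippy::needless_return — at the end of a function body the trailing expression already is the return value. Sketch (logic invented):

    fn last_some(slots: &[Option<u8>]) -> Option<u8> {
        let mut found = None;
        for s in slots {
            if s.is_some() {
                found = *s;
            }
        }
        // No `return found;` needed -- the final expression is returned.
        found
    }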
@@ -136,7 +136,7 @@ fn find_local_assigned_to_return_place(
         }
     }
 
-    return None;
+    None
 }
 
 // If this statement is an assignment of an unprojected local to the return place,
@@ -99,7 +99,7 @@ fn get_arm_identity_info<'a, 'tcx>(stmts: &'a [Statement<'tcx>]) -> Option<ArmId
     fn try_eat<'a, 'tcx>(
         stmt_iter: &mut StmtIter<'a, 'tcx>,
         test: impl Fn(&'a Statement<'tcx>) -> bool,
-        mut action: impl FnMut(usize, &'a Statement<'tcx>) -> (),
+        mut action: impl FnMut(usize, &'a Statement<'tcx>),
     ) {
         while stmt_iter.peek().map(|(_, stmt)| test(stmt)).unwrap_or(false) {
             let (idx, stmt) = stmt_iter.next().unwrap();
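Note: the `-> ()` on the `action` bound is clippy::unused_unit — the unit return type is implied and adds nothing. Sketch:

    // Before: action: impl FnMut(usize) -> () -- the `-> ()` is redundant.
    fn for_each_index(n: usize, mut action: impl FnMut(usize)) {
        for i in 0..n {
            action(i);
        }
    }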
@@ -271,7 +271,7 @@ fn optimization_applies<'tcx>(
     }
 
     // Verify the assigment chain consists of the form b = a; c = b; d = c; etc...
-    if opt_info.field_tmp_assignments.len() == 0 {
+    if opt_info.field_tmp_assignments.is_empty() {
         trace!("NO: no assignments found");
     }
     let mut last_assigned_to = opt_info.field_tmp_assignments[0].1;
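Note: `.len() == 0` is clippy::len_zero — `is_empty()` states the question being asked. Sketch with invented names:

    fn check(assignments: &[(u32, u32)]) {
        // is_empty() is the idiomatic emptiness test and exists even on
        // types where len() would be more expensive to compute.
        if assignments.is_empty() {
            println!("no assignments found");
        }
    }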
@@ -401,7 +401,7 @@ impl<'a> StringReader<'a> {
         let content_end = suffix_start - BytePos(postfix_len);
         let id = self.symbol_from_to(content_start, content_end);
         self.validate_literal_escape(mode, content_start, content_end);
-        return (lit_kind, id);
+        (lit_kind, id)
     }
 
     pub fn pos(&self) -> BytePos {
@@ -936,7 +936,7 @@ impl<'a> Parser<'a> {
         } else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) {
             // The current token is in the same line as the prior token, not recoverable.
         } else if [token::Comma, token::Colon].contains(&self.token.kind)
-            && &self.prev_token.kind == &token::CloseDelim(token::Paren)
+            && self.prev_token.kind == token::CloseDelim(token::Paren)
         {
             // Likely typo: The current token is on a new line and is expected to be
             // `.`, `;`, `?`, or an operator after a close delimiter token.
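Note: taking `&` on both sides of `==` is clippy::op_ref — the references are dereferenced for the comparison anyway, so they are noise. Sketch (enum invented):

    #[derive(PartialEq)]
    enum Delim {
        Paren,
        Brace,
    }

    fn closes_paren(prev: &Delim) -> bool {
        // Compare the values, not freshly-taken references to them.
        *prev == Delim::Paren
    }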
@@ -193,7 +193,7 @@ impl TokenCursor {
                 tree,
                 self.stack.len()
             );
-            collecting.buf.push(tree.clone().into())
+            collecting.buf.push(tree.clone())
         }
     }
 
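Note: `tree.clone().into()` pushed into a buffer of the same element type is clippy::useless_conversion — `.into()` from `T` to `T` is the identity. Sketch:

    fn buffer_copy(buf: &mut Vec<String>, item: &String) {
        // item.clone() is already a String; a trailing .into() would
        // convert String into String and change nothing.
        buf.push(item.clone());
    }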
@@ -675,7 +675,7 @@ impl<'a> Parser<'a> {
         // If this was a missing `@` in a binding pattern
         // bail with a suggestion
         // https://github.com/rust-lang/rust/issues/72373
-        if self.prev_token.is_ident() && &self.token.kind == &token::DotDot {
+        if self.prev_token.is_ident() && self.token.kind == token::DotDot {
             let msg = format!(
                 "if you meant to bind the contents of \
                  the rest of the array pattern into `{}`, use `@`",
@@ -1193,7 +1193,7 @@ impl<'a> Parser<'a> {
         let mut collected_tokens = if let Some(collecting) = self.token_cursor.collecting.take() {
             collecting.buf
         } else {
-            let msg = format!("our vector went away?");
+            let msg = "our vector went away?";
             debug!("collect_tokens: {}", msg);
             self.sess.span_diagnostic.delay_span_bug(self.token.span, &msg);
             // This can happen due to a bad interaction of two unrelated recovery mechanisms
@@ -232,7 +232,7 @@ impl ExprVisitor<'tcx> {
                 // size).
                 if let Some((in_expr, Some(in_asm_ty))) = tied_input {
                     if in_asm_ty != asm_ty {
-                        let msg = &format!("incompatible types for asm inout argument");
+                        let msg = "incompatible types for asm inout argument";
                         let mut err = self.tcx.sess.struct_span_err(vec![in_expr.span, expr.span], msg);
                         err.span_label(
                             in_expr.span,
@@ -126,7 +126,7 @@ impl<'a> SourceCollector<'a> {
             &self.scx.themes,
         );
         self.scx.fs.write(&cur, v.as_bytes())?;
-        self.scx.local_sources.insert(p.clone(), href);
+        self.scx.local_sources.insert(p, href);
         Ok(())
     }
 }
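Note: `p.clone()` in the rustdoc hunk is clippy::redundant_clone — when the original is never used again, the clone duplicates a value that could simply be moved. Sketch over a plain map (names invented):

    use std::collections::HashMap;

    fn record(sources: &mut HashMap<String, String>, path: String, href: String) {
        // `path` is not touched after this call, so it can be moved into
        // the map instead of being cloned first.
        sources.insert(path, href);
    }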
@@ -451,7 +451,7 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
                 ..
             },
             ..
-        })) => segments.first().and_then(|seg| Some(seg.ident.to_string())),
+        })) => segments.first().map(|seg| seg.ident.to_string()),
         Some(hir::Node::Item(hir::Item {
             ident, kind: hir::ItemKind::Enum(..), ..
         }))