Rollup merge of #69656 - matthiaskrgr:iter_nth_zero, r=oli-obk
Use .next() instead of .nth(0) on iterators.
Commit: 5d1433b1f4
12 changed files with 14 additions and 14 deletions
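All 14 replacements follow the same pattern: on any iterator, `nth(0)` consumes and returns the first remaining element, which is exactly what `next()` does, so `next()` states the intent directly (the branch name suggests this follows clippy's `iter_nth_zero` lint). A minimal standalone sketch of the equivalence, using hypothetical data rather than anything from the diff:

    fn main() {
        let xs = [1, 2, 3];
        let mut a = xs.iter();
        let mut b = xs.iter();
        assert_eq!(a.nth(0), b.next()); // both yield Some(&1)
        assert_eq!(a.next(), b.next()); // both yield Some(&2): the first element was consumed
    }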
@@ -1446,7 +1446,7 @@ impl<'tcx> Debug for TerminatorKind<'tcx> {
         match successor_count {
             0 => Ok(()),
 
-            1 => write!(fmt, " -> {:?}", self.successors().nth(0).unwrap()),
+            1 => write!(fmt, " -> {:?}", self.successors().next().unwrap()),
 
             _ => {
                 write!(fmt, " -> [")?;
@@ -357,7 +357,7 @@ impl<'tcx> TyCtxt<'tcx> {
         let mut dtor_did = None;
         let ty = self.type_of(adt_did);
         self.for_each_relevant_impl(drop_trait, ty, |impl_did| {
-            if let Some(item) = self.associated_items(impl_did).in_definition_order().nth(0) {
+            if let Some(item) = self.associated_items(impl_did).in_definition_order().next() {
                 if validate(self, impl_did).is_ok() {
                     dtor_did = Some(item.def_id);
                 }
@@ -230,7 +230,7 @@ impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> {
         };
 
         let first_succ = {
-            if let Some(&first_succ) = terminator.successors().nth(0) {
+            if let Some(&first_succ) = terminator.successors().next() {
                 if terminator.successors().all(|s| *s == first_succ) {
                     let count = terminator.successors().count();
                     self.pred_count[first_succ] -= (count - 1) as u32;
@@ -549,7 +549,7 @@ where
         debug!("destructor_call_block({:?}, {:?})", self, succ);
         let tcx = self.tcx();
         let drop_trait = tcx.lang_items().drop_trait().unwrap();
-        let drop_fn = tcx.associated_items(drop_trait).in_definition_order().nth(0).unwrap();
+        let drop_fn = tcx.associated_items(drop_trait).in_definition_order().next().unwrap();
         let ty = self.place_ty(self.place);
         let substs = tcx.mk_substs_trait(ty, &[]);
 
@@ -1000,7 +1000,7 @@ impl<'tcx> Constructor<'tcx> {
                         PatKind::Leaf { subpatterns }
                     }
                 }
-                ty::Ref(..) => PatKind::Deref { subpattern: subpatterns.nth(0).unwrap() },
+                ty::Ref(..) => PatKind::Deref { subpattern: subpatterns.next().unwrap() },
                 ty::Slice(_) | ty::Array(..) => bug!("bad slice pattern {:?} {:?}", self, ty),
                 _ => PatKind::Wild,
             },
@@ -1008,7 +1008,7 @@ impl<'a> Parser<'a> {
         };
         let kind = if es.len() == 1 && !trailing_comma {
             // `(e)` is parenthesized `e`.
-            ExprKind::Paren(es.into_iter().nth(0).unwrap())
+            ExprKind::Paren(es.into_iter().next().unwrap())
         } else {
             // `(e,)` is a tuple with only one field, `e`.
             ExprKind::Tup(es)
@@ -479,7 +479,7 @@ impl<'a> Parser<'a> {
         // Here, `(pat,)` is a tuple pattern.
         // For backward compatibility, `(..)` is a tuple pattern as well.
         Ok(if fields.len() == 1 && !(trailing_comma || fields[0].is_rest()) {
-            PatKind::Paren(fields.into_iter().nth(0).unwrap())
+            PatKind::Paren(fields.into_iter().next().unwrap())
         } else {
             PatKind::Tuple(fields)
         })
@@ -198,7 +198,7 @@ impl<'a> Parser<'a> {
         })?;
 
         if ts.len() == 1 && !trailing {
-            let ty = ts.into_iter().nth(0).unwrap().into_inner();
+            let ty = ts.into_iter().next().unwrap().into_inner();
             let maybe_bounds = allow_plus == AllowPlus::Yes && self.token.is_like_plus();
             match ty.kind {
                 // `(TY_BOUND_NOPAREN) + BOUND + ...`.
@@ -620,7 +620,7 @@ impl SourceMap {
     /// if no character could be found or if an error occurred while retrieving the code snippet.
     pub fn span_extend_to_prev_char(&self, sp: Span, c: char) -> Span {
         if let Ok(prev_source) = self.span_to_prev_source(sp) {
-            let prev_source = prev_source.rsplit(c).nth(0).unwrap_or("").trim_start();
+            let prev_source = prev_source.rsplit(c).next().unwrap_or("").trim_start();
             if !prev_source.is_empty() && !prev_source.contains('\n') {
                 return sp.with_lo(BytePos(sp.lo().0 - prev_source.len() as u32));
             }
@@ -640,7 +640,7 @@ impl SourceMap {
         for ws in &[" ", "\t", "\n"] {
             let pat = pat.to_owned() + ws;
             if let Ok(prev_source) = self.span_to_prev_source(sp) {
-                let prev_source = prev_source.rsplit(&pat).nth(0).unwrap_or("").trim_start();
+                let prev_source = prev_source.rsplit(&pat).next().unwrap_or("").trim_start();
                 if !prev_source.is_empty() && (!prev_source.contains('\n') || accept_newlines) {
                     return sp.with_lo(BytePos(sp.lo().0 - prev_source.len() as u32));
                 }
@@ -655,7 +655,7 @@ impl SourceMap {
     pub fn span_until_char(&self, sp: Span, c: char) -> Span {
         match self.span_to_snippet(sp) {
             Ok(snippet) => {
-                let snippet = snippet.split(c).nth(0).unwrap_or("").trim_end();
+                let snippet = snippet.split(c).next().unwrap_or("").trim_end();
                 if !snippet.is_empty() && !snippet.contains('\n') {
                     sp.with_hi(BytePos(sp.lo().0 + snippet.len() as u32))
                 } else {
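For the three SourceMap hunks above, note that `split(c).next()` yields the text before the first occurrence of `c` and `rsplit(c).next()` the text after the last one; both iterators always produce at least one item, so the `unwrap_or("")` is just a conservative fallback. A small sketch of that behavior, independent of the compiler code:

    fn main() {
        let s = "foo:bar:baz";
        assert_eq!(s.split(':').next(), Some("foo"));   // before the first ':'
        assert_eq!(s.rsplit(':').next(), Some("baz"));  // after the last ':'
        // Even without a match there is one item: the whole string.
        assert_eq!("no-colon".split(':').next(), Some("no-colon"));
    }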
@@ -675,7 +675,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         // The `Future` trait has only one associted item, `Output`,
         // so check that this is what we see.
         let output_assoc_item =
-            self.tcx.associated_items(future_trait).in_definition_order().nth(0).unwrap().def_id;
+            self.tcx.associated_items(future_trait).in_definition_order().next().unwrap().def_id;
         if output_assoc_item != predicate.projection_ty.item_def_id {
             span_bug!(
                 cause_span,
@@ -5244,7 +5244,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             .tcx
             .associated_items(future_trait)
             .in_definition_order()
-            .nth(0)
+            .next()
             .unwrap()
             .def_id;
         let predicate =
@@ -147,7 +147,7 @@ fn main() {
         eprintln!("Must provide path to write unicode tables to");
         eprintln!(
             "e.g. {} src/libcore/unicode/unicode_data.rs",
-            std::env::args().nth(0).unwrap_or_default()
+            std::env::args().next().unwrap_or_default()
         );
         std::process::exit(1);
     });
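One last reading of the final hunk (a note about std behavior, not something stated in the PR): the first item of `std::env::args()` is conventionally the program name, so `next()` here fetches argv[0], with `unwrap_or_default()` covering the unusual case where the runtime supplies no arguments at all. A tiny hypothetical sketch:

    fn main() {
        // Hypothetical illustration: print this binary's own invocation name.
        let program = std::env::args().next().unwrap_or_default();
        eprintln!("usage: {} <output-path>", program);
    }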