
Auto merge of #69746 - Dylan-DPC:rollup-wr6dvdk, r=Dylan-DPC

Rollup of 8 pull requests

Successful merges:

 - #69697 (Add explanation for E0380)
 - #69698 (Use associated constants of integer types)
 - #69711 (Update macros.rs: fix documentation typo.)
 - #69713 (more clippy cleanups)
 - #69728 (Make link to `std::str` active)
 - #69732 (Clean E0382 and E0384 explanations)
 - #69736 (even more clippy cleanups)
 - #69742 (Fixed a typo)

Failed merges:

r? @ghost
bors 2020-03-05 21:39:00 +00:00
commit b818ccc74c
57 changed files with 128 additions and 129 deletions

View file

@@ -407,7 +407,7 @@ impl String {
 ///
 /// assert_eq!(s.capacity(), cap);
 ///
-/// // ...but this may make the vector reallocate
+/// // ...but this may make the string reallocate
 /// s.push('a');
 /// ```
 #[inline]
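
The doc fix above concerns reallocation behavior; as a quick standalone illustration (not the doctest from the diff itself), pushing past the reserved capacity is what forces a `String` to grow:

```rust
fn main() {
    let mut s = String::with_capacity(10);
    let cap = s.capacity();

    // Pushes that stay within the reserved capacity do not reallocate.
    while s.len() < s.capacity() {
        s.push('a');
    }
    assert_eq!(s.capacity(), cap);

    // ...but this push may make the string reallocate.
    s.push('a');
    assert!(s.capacity() > cap);
}
```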

View file

@@ -2,7 +2,9 @@
 //! String manipulation.
 //!
-//! For more details, see the `std::str` module.
+//! For more details, see the [`std::str`] module.
+//!
+//! [`std::str`]: ../../std/str/index.html
 #![stable(feature = "rust1", since = "1.0.0")]

View file

@@ -524,7 +524,7 @@ impl DepGraph {
 edge_list_indices.push((start, end));
 }
-debug_assert!(edge_list_data.len() <= ::std::u32::MAX as usize);
+debug_assert!(edge_list_data.len() <= u32::MAX as usize);
 debug_assert_eq!(edge_list_data.len(), total_edge_count);
 SerializedDepGraph { nodes, fingerprints, edge_list_indices, edge_list_data }

View file

@@ -818,9 +818,9 @@ impl UndefMask {
 // First set all bits except the first `bita`,
 // then unset the last `64 - bitb` bits.
 let range = if bitb == 0 {
-u64::max_value() << bita
+u64::MAX << bita
 } else {
-(u64::max_value() << bita) & (u64::max_value() >> (64 - bitb))
+(u64::MAX << bita) & (u64::MAX >> (64 - bitb))
 };
 if new_state {
 self.blocks[blocka] |= range;
@@ -832,21 +832,21 @@ impl UndefMask {
 // across block boundaries
 if new_state {
 // Set `bita..64` to `1`.
-self.blocks[blocka] |= u64::max_value() << bita;
+self.blocks[blocka] |= u64::MAX << bita;
 // Set `0..bitb` to `1`.
 if bitb != 0 {
-self.blocks[blockb] |= u64::max_value() >> (64 - bitb);
+self.blocks[blockb] |= u64::MAX >> (64 - bitb);
 }
 // Fill in all the other blocks (much faster than one bit at a time).
 for block in (blocka + 1)..blockb {
-self.blocks[block] = u64::max_value();
+self.blocks[block] = u64::MAX;
 }
 } else {
 // Set `bita..64` to `0`.
-self.blocks[blocka] &= !(u64::max_value() << bita);
+self.blocks[blocka] &= !(u64::MAX << bita);
 // Set `0..bitb` to `0`.
 if bitb != 0 {
-self.blocks[blockb] &= !(u64::max_value() >> (64 - bitb));
+self.blocks[blockb] &= !(u64::MAX >> (64 - bitb));
 }
 // Fill in all the other blocks (much faster than one bit at a time).
 for block in (blocka + 1)..blockb {
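
For readers skimming the hunk above, here is a minimal standalone sketch of the mask arithmetic it relies on (plain `u64` math, not rustc's `UndefMask`; the `end == 64` convention is an assumption standing in for the `bitb == 0` special case):

```rust
/// Builds a mask with bits `start..end` set, mirroring the
/// `(u64::MAX << bita) & (u64::MAX >> (64 - bitb))` expression from the diff.
/// `end == 64` is handled separately because shifting a u64 by 64 is not allowed.
fn range_mask(start: u32, end: u32) -> u64 {
    assert!(start < end && end <= 64);
    if end == 64 {
        u64::MAX << start
    } else {
        (u64::MAX << start) & (u64::MAX >> (64 - end))
    }
}

fn main() {
    assert_eq!(range_mask(0, 4), 0b1111);
    assert_eq!(range_mask(3, 6), 0b111000);
    assert_eq!(range_mask(0, 64), u64::MAX);
}
```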

View file

@@ -78,9 +78,9 @@ pub trait PointerArithmetic: layout::HasDataLayout {
 fn overflowing_signed_offset(&self, val: u64, i: i128) -> (u64, bool) {
 // FIXME: is it possible to over/underflow here?
 if i < 0 {
-// Trickery to ensure that `i64::min_value()` works fine: compute `n = -i`.
+// Trickery to ensure that `i64::MIN` works fine: compute `n = -i`.
 // This formula only works for true negative values; it overflows for zero!
-let n = u64::max_value() - (i as u64) + 1;
+let n = u64::MAX - (i as u64) + 1;
 let res = val.overflowing_sub(n);
 self.truncate_to_ptr(res)
 } else {
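
The `n = -i` trick in this hunk avoids overflow when `i` is the most negative value; a small hedged sketch of why it works, using standalone integers rather than the interpreter's pointer types:

```rust
/// Magnitude of a negative `i128` that fits in 64 bits, computed without ever
/// negating `i` directly. For -2^64 < i < 0, `i as u64` is `2^64 + i`, so
/// `u64::MAX - (i as u64) + 1` is exactly `-i`. As the diff's comment notes,
/// the formula overflows for `i == 0`.
fn magnitude_of_negative(i: i128) -> u64 {
    debug_assert!(i < 0);
    u64::MAX - (i as u64) + 1
}

fn main() {
    assert_eq!(magnitude_of_negative(-1), 1);
    assert_eq!(magnitude_of_negative(-42), 42);
    // The case the comment calls out: `i64::MIN` has no positive counterpart
    // in i64, but its magnitude fits comfortably in u64.
    assert_eq!(magnitude_of_negative(i64::MIN as i128), 1u64 << 63);
}
```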

View file

@@ -1198,7 +1198,7 @@ impl<'tcx> TerminatorKind<'tcx> {
 t: BasicBlock,
 f: BasicBlock,
 ) -> TerminatorKind<'tcx> {
-static BOOL_SWITCH_FALSE: &'static [u128] = &[0];
+static BOOL_SWITCH_FALSE: &[u128] = &[0];
 TerminatorKind::SwitchInt {
 discr: cond,
 switch_ty: tcx.types.bool,

View file

@@ -415,9 +415,9 @@ impl<'a, 'tcx> Lift<'tcx> for traits::ObligationCauseCode<'a> {
 super::ReferenceOutlivesReferent(ty) => {
 tcx.lift(&ty).map(super::ReferenceOutlivesReferent)
 }
-super::ObjectTypeBound(ty, r) => tcx
-.lift(&ty)
-.and_then(|ty| tcx.lift(&r).and_then(|r| Some(super::ObjectTypeBound(ty, r)))),
+super::ObjectTypeBound(ty, r) => {
+tcx.lift(&ty).and_then(|ty| tcx.lift(&r).map(|r| super::ObjectTypeBound(ty, r)))
+}
 super::ObjectCastObligation(ty) => tcx.lift(&ty).map(super::ObjectCastObligation),
 super::Coercion { source, target } => {
 Some(super::Coercion { source: tcx.lift(&source)?, target: tcx.lift(&target)? })

View file

@@ -7,7 +7,6 @@ use rustc_span::DUMMY_SP;
 use std::cmp;
 use std::fmt;
-use std::i128;
 use std::iter;
 use std::mem;
 use std::ops::Bound;
@@ -1001,7 +1000,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
 }
 }
-let (mut min, mut max) = (i128::max_value(), i128::min_value());
+let (mut min, mut max) = (i128::MAX, i128::MIN);
 let discr_type = def.repr.discr_type();
 let bits = Integer::from_attr(self, discr_type).size().bits();
 for (i, discr) in def.discriminants(tcx) {
@@ -1021,7 +1020,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
 }
 }
 // We might have no inhabited variants, so pretend there's at least one.
-if (min, max) == (i128::max_value(), i128::min_value()) {
+if (min, max) == (i128::MAX, i128::MIN) {
 min = 0;
 max = 0;
 }

View file

@@ -920,7 +920,7 @@ pub trait PrettyPrinter<'tcx>:
 }
 (ConstValue::Scalar(Scalar::Raw { data, .. }), ty::Uint(ui)) => {
 let bit_size = Integer::from_attr(&self.tcx(), UnsignedInt(*ui)).size();
-let max = truncate(u128::max_value(), bit_size);
+let max = truncate(u128::MAX, bit_size);
 let ui_str = ui.name_str();
 if data == max {

View file

@@ -92,7 +92,7 @@ struct AbsoluteBytePos(u32);
 impl AbsoluteBytePos {
 fn new(pos: usize) -> AbsoluteBytePos {
-debug_assert!(pos <= ::std::u32::MAX as usize);
+debug_assert!(pos <= u32::MAX as usize);
 AbsoluteBytePos(pos as u32)
 }

View file

@@ -50,11 +50,11 @@ fn signed_min(size: Size) -> i128 {
 }
 fn signed_max(size: Size) -> i128 {
-i128::max_value() >> (128 - size.bits())
+i128::MAX >> (128 - size.bits())
 }
 fn unsigned_max(size: Size) -> u128 {
-u128::max_value() >> (128 - size.bits())
+u128::MAX >> (128 - size.bits())
 }
 fn int_size_and_signed<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> (Size, bool) {
@@ -77,7 +77,7 @@ impl<'tcx> Discr<'tcx> {
 let min = signed_min(size);
 let max = signed_max(size);
 let val = sign_extend(self.val, size) as i128;
-assert!(n < (i128::max_value() as u128));
+assert!(n < (i128::MAX as u128));
 let n = n as i128;
 let oflo = val > max - n;
 let val = if oflo { min + (n - (max - val) - 1) } else { val + n };
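
A short sketch of the width-dependent max formulas used above, with a plain bit count standing in for rustc's `Size` type (an assumption for illustration only):

```rust
/// Largest unsigned value representable in `bits` bits (1..=128).
fn unsigned_max(bits: u32) -> u128 {
    u128::MAX >> (128 - bits)
}

/// Largest signed value representable in `bits` bits (1..=128).
fn signed_max(bits: u32) -> i128 {
    i128::MAX >> (128 - bits)
}

fn main() {
    assert_eq!(unsigned_max(8), u8::MAX as u128);
    assert_eq!(unsigned_max(64), u64::MAX as u128);
    assert_eq!(signed_max(8), i8::MAX as i128);
    assert_eq!(signed_max(32), i32::MAX as i128);
}
```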

View file

@@ -284,7 +284,7 @@ impl<'a, 'b> Context<'a, 'b> {
 err.tool_only_span_suggestion(
 sp,
 &format!("use the `{}` trait", name),
-fmt.to_string(),
+(*fmt).to_string(),
 Applicability::MaybeIncorrect,
 );
 }
@@ -476,7 +476,7 @@ impl<'a, 'b> Context<'a, 'b> {
 match ty {
 Placeholder(_) => {
 // record every (position, type) combination only once
-let ref mut seen_ty = self.arg_unique_types[arg];
+let seen_ty = &mut self.arg_unique_types[arg];
 let i = seen_ty.iter().position(|x| *x == ty).unwrap_or_else(|| {
 let i = seen_ty.len();
 seen_ty.push(ty);
@@ -526,7 +526,7 @@ impl<'a, 'b> Context<'a, 'b> {
 // Map the arguments
 for i in 0..args_len {
-let ref arg_types = self.arg_types[i];
+let arg_types = &self.arg_types[i];
 let arg_offsets = arg_types.iter().map(|offset| sofar + *offset).collect::<Vec<_>>();
 self.arg_index_map.push(arg_offsets);
 sofar += self.arg_unique_types[i].len();
@@ -597,7 +597,7 @@ impl<'a, 'b> Context<'a, 'b> {
 let arg_idx = match arg_index_consumed.get_mut(i) {
 None => 0, // error already emitted elsewhere
 Some(offset) => {
-let ref idx_map = self.arg_index_map[i];
+let idx_map = &self.arg_index_map[i];
 // unwrap_or branch: error already emitted elsewhere
 let arg_idx = *idx_map.get(*offset).unwrap_or(&0);
 *offset += 1;
@@ -721,7 +721,7 @@ impl<'a, 'b> Context<'a, 'b> {
 let name = names_pos[i];
 let span = self.ecx.with_def_site_ctxt(e.span);
 pats.push(self.ecx.pat_ident(span, name));
-for ref arg_ty in self.arg_unique_types[i].iter() {
+for arg_ty in self.arg_unique_types[i].iter() {
 locals.push(Context::format_arg(self.ecx, self.macsp, e.span, arg_ty, name));
 }
 heads.push(self.ecx.expr_addr_of(e.span, e));

View file

@@ -57,12 +57,12 @@ impl AllocFnFactory<'_, '_> {
 fn allocator_fn(&self, method: &AllocatorMethod) -> Stmt {
 let mut abi_args = Vec::new();
 let mut i = 0;
-let ref mut mk = || {
+let mut mk = || {
 let name = self.cx.ident_of(&format!("arg{}", i), self.span);
 i += 1;
 name
 };
-let args = method.inputs.iter().map(|ty| self.arg_ty(ty, &mut abi_args, mk)).collect();
+let args = method.inputs.iter().map(|ty| self.arg_ty(ty, &mut abi_args, &mut mk)).collect();
 let result = self.call_allocator(method.name, args);
 let (output_ty, output_expr) = self.ret_ty(&method.output, result);
 let decl = self.cx.fn_decl(abi_args, ast::FnRetTy::Ty(output_ty));

View file

@@ -313,7 +313,7 @@ fn should_fail(i: &ast::Item) -> bool {
 fn should_panic(cx: &ExtCtxt<'_>, i: &ast::Item) -> ShouldPanic {
 match attr::find_by_name(&i.attrs, sym::should_panic) {
 Some(attr) => {
-let ref sd = cx.parse_sess.span_diagnostic;
+let sd = &cx.parse_sess.span_diagnostic;
 match attr.meta_item_list() {
 // Handle #[should_panic(expected = "foo")]
@@ -378,7 +378,7 @@ fn test_type(cx: &ExtCtxt<'_>) -> TestType {
 fn has_test_signature(cx: &ExtCtxt<'_>, i: &ast::Item) -> bool {
 let has_should_panic_attr = attr::contains_name(&i.attrs, sym::should_panic);
-let ref sd = cx.parse_sess.span_diagnostic;
+let sd = &cx.parse_sess.span_diagnostic;
 if let ast::ItemKind::Fn(_, ref sig, ref generics, _) = i.kind {
 if let ast::Unsafe::Yes(span) = sig.header.unsafety {
 sd.struct_span_err(i.span, "unsafe functions cannot be used for tests")

View file

@@ -326,7 +326,7 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> {
 /// &[&test1, &test2]
 fn mk_tests_slice(cx: &TestCtxt<'_>, sp: Span) -> P<ast::Expr> {
 debug!("building test vector from {} tests", cx.test_cases.len());
-let ref ecx = cx.ext_cx;
+let ecx = &cx.ext_cx;
 ecx.expr_vec_slice(
 sp,

View file

@@ -60,7 +60,7 @@ impl AsmBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> {
 .chain(ia.inputs.iter().map(|s| s.to_string()))
 .chain(ext_constraints)
 .chain(clobbers)
-.chain(arch_clobbers.iter().map(|s| s.to_string()))
+.chain(arch_clobbers.iter().map(|s| (*s).to_string()))
 .collect::<Vec<String>>()
 .join(",");

View file

@@ -725,7 +725,7 @@ pub(crate) unsafe fn codegen(
 Err(_) => return 0,
 };
-if let Err(_) = write!(cursor, "{:#}", demangled) {
+if write!(cursor, "{:#}", demangled).is_err() {
 // Possible only if provided buffer is not big enough
 return 0;
 }

View file

@@ -174,7 +174,6 @@ pub unsafe fn create_module(
 let llvm_data_layout = llvm::LLVMGetDataLayout(llmod);
 let llvm_data_layout = str::from_utf8(CStr::from_ptr(llvm_data_layout).to_bytes())
-.ok()
 .expect("got a non-UTF8 data-layout from LLVM");
 // Unfortunately LLVM target specs change over time, and right now we

View file

@@ -1257,7 +1257,7 @@ fn start_executing_work<B: ExtraBackendMethods>(
 if main_thread_worker_state == MainThreadWorkerState::Idle {
 if !queue_full_enough(work_items.len(), running, max_workers) {
 // The queue is not full enough, codegen more items:
-if let Err(_) = codegen_worker_send.send(Message::CodegenItem) {
+if codegen_worker_send.send(Message::CodegenItem).is_err() {
 panic!("Could not send Message::CodegenItem to main thread")
 }
 main_thread_worker_state = MainThreadWorkerState::Codegenning;

View file

@@ -1,4 +1,14 @@
-Auto traits cannot have methods or associated items.
-For more information see the [opt-in builtin traits RFC][RFC 19].
+An auto trait was declared with a method or an associated item.
+
+Erroneous code example:
+
+```compile_fail,E0380
+unsafe auto trait Trait {
+type Output; // error!
+}
+```
+
+Auto traits cannot have methods or associated items. For more information see
+the [opt-in builtin traits RFC][RFC 19].
 
 [RFC 19]: https://github.com/rust-lang/rfcs/blob/master/text/0019-opt-in-builtin-traits.md

View file

@@ -1,5 +1,4 @@
-This error occurs when an attempt is made to use a variable after its contents
-have been moved elsewhere.
+A variable was used after its contents have been moved elsewhere.
 
 Erroneous code example:

View file

@@ -1,4 +1,4 @@
-This error occurs when an attempt is made to reassign an immutable variable.
+An immutable variable was reassigned.
 
 Erroneous code example:

View file

@@ -163,7 +163,7 @@ impl CodeSuggestion {
 None => buf.push_str(&line[lo..]),
 }
 }
-if let None = hi_opt {
+if hi_opt.is_none() {
 buf.push('\n');
 }
 }

View file

@@ -27,6 +27,6 @@ impl Registry {
 if !self.long_descriptions.contains_key(code) {
 return Err(InvalidErrorCode);
 }
-Ok(self.long_descriptions.get(code).unwrap().clone())
+Ok(*self.long_descriptions.get(code).unwrap())
 }
 }

View file

@@ -343,7 +343,8 @@ impl DirtyCleanVisitor<'tcx> {
 &format!("clean/dirty auto-assertions not yet defined for {:?}", node),
 ),
 };
-let labels = Labels::from_iter(labels.iter().flat_map(|s| s.iter().map(|l| l.to_string())));
+let labels =
+Labels::from_iter(labels.iter().flat_map(|s| s.iter().map(|l| (*l).to_string())));
 (name, labels)
 }

View file

@@ -150,7 +150,7 @@ impl<'tcx> AutoTraitFinder<'tcx> {
 // SelectionContext to return it back to us.
 let (new_env, user_env) = match self.evaluate_predicates(
-&mut infcx,
+&infcx,
 trait_did,
 ty,
 orig_env,

View file

@@ -1341,7 +1341,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 stack: &TraitObligationStack<'o, 'tcx>,
 ) -> Result<SelectionCandidateSet<'tcx>, SelectionError<'tcx>> {
 let TraitObligationStack { obligation, .. } = *stack;
-let ref obligation = Obligation {
+let obligation = &Obligation {
 param_env: obligation.param_env,
 cause: obligation.cause.clone(),
 recursion_depth: obligation.recursion_depth,

View file

@@ -426,7 +426,7 @@ pub(crate) fn check_attr_crate_type(attrs: &[ast::Attribute], lint_buffer: &mut
 for a in attrs.iter() {
 if a.check_name(sym::crate_type) {
 if let Some(n) = a.value_str() {
-if let Some(_) = categorize_crate_type(n) {
+if categorize_crate_type(n).is_some() {
 return;
 }

View file

@@ -335,7 +335,7 @@ impl LintStore {
 lint_name.to_string()
 };
 // If the lint was scoped with `tool::` check if the tool lint exists
-if let Some(_) = tool_name {
+if tool_name.is_some() {
 match self.by_name.get(&complete_name) {
 None => match self.lint_groups.get(&*complete_name) {
 None => return CheckLintNameResult::Tool(Err((None, String::new()))),
@@ -369,7 +369,7 @@ impl LintStore {
 return if *silent {
 CheckLintNameResult::Ok(&lint_ids)
 } else {
-CheckLintNameResult::Tool(Err((Some(&lint_ids), name.to_string())))
+CheckLintNameResult::Tool(Err((Some(&lint_ids), (*name).to_string())))
 };
 }
 CheckLintNameResult::Ok(&lint_ids)
@@ -404,7 +404,7 @@ impl LintStore {
 return if *silent {
 CheckLintNameResult::Tool(Err((Some(&lint_ids), complete_name)))
 } else {
-CheckLintNameResult::Tool(Err((Some(&lint_ids), name.to_string())))
+CheckLintNameResult::Tool(Err((Some(&lint_ids), (*name).to_string())))
 };
 }
 CheckLintNameResult::Tool(Err((Some(&lint_ids), complete_name)))

View file

@@ -1905,7 +1905,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
 // expressions evaluate through `as_temp` or `into` a return
 // slot or local, so to find all unsized rvalues it is enough
 // to check all temps, return slots and locals.
-if let None = self.reported_errors.replace((ty, span)) {
+if self.reported_errors.replace((ty, span)).is_none() {
 let mut diag = struct_span_err!(
 self.tcx().sess,
 span,

View file

@@ -64,7 +64,7 @@ impl TypeRelatingDelegate<'tcx> for NllTypeRelatingDelegate<'_, '_, 'tcx> {
 }
 fn next_existential_region_var(&mut self, from_forall: bool) -> ty::Region<'tcx> {
-if let Some(_) = &mut self.borrowck_context {
+if self.borrowck_context.is_some() {
 let origin = NLLRegionVariableOrigin::Existential { from_forall };
 self.infcx.next_nll_region_var(origin)
 } else {

View file

@@ -604,8 +604,8 @@ fn write_diff<A: Analysis<'tcx>>(
 Ok(())
 }
-const BR_LEFT: &'static str = r#"<br align="left"/>"#;
-const BR_LEFT_SPACE: &'static str = r#"<br align="left"/> "#;
+const BR_LEFT: &str = r#"<br align="left"/>"#;
+const BR_LEFT_SPACE: &str = r#"<br align="left"/> "#;
 /// Line break policy that breaks at 40 characters and starts the next line with a single space.
 const LIMIT_30_ALIGN_1: Option<LineBreak> = Some(LineBreak { sequence: BR_LEFT_SPACE, limit: 30 });

View file

@@ -22,20 +22,20 @@ pub fn visit_results<F>(
 let loc = Location { block, statement_index };
 results.reconstruct_before_statement_effect(&mut state, stmt, loc);
-vis.visit_statement(&mut state, stmt, loc);
+vis.visit_statement(&state, stmt, loc);
 results.reconstruct_statement_effect(&mut state, stmt, loc);
-vis.visit_statement_exit(&mut state, stmt, loc);
+vis.visit_statement_exit(&state, stmt, loc);
 }
 let loc = body.terminator_loc(block);
 let term = block_data.terminator();
 results.reconstruct_before_terminator_effect(&mut state, term, loc);
-vis.visit_terminator(&mut state, term, loc);
+vis.visit_terminator(&state, term, loc);
 results.reconstruct_terminator_effect(&mut state, term, loc);
-vis.visit_terminator_exit(&mut state, term, loc);
+vis.visit_terminator_exit(&state, term, loc);
 }
 }

View file

@@ -16,7 +16,6 @@ use std::borrow::Borrow;
 use std::fmt;
 use std::io;
 use std::path::PathBuf;
-use std::usize;
 pub use self::at_location::{FlowAtLocation, FlowsAtLocation};
 pub(crate) use self::drop_flag_effects::*;

View file

@@ -203,7 +203,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
 if is_add {
 // max unsigned
 Scalar::from_uint(
-u128::max_value() >> (128 - num_bits),
+u128::MAX >> (128 - num_bits),
 Size::from_bits(num_bits),
 )
 } else {
@@ -381,11 +381,11 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
 dest: PlaceTy<'tcx, M::PointerTag>,
 ) -> InterpResult<'tcx> {
 // Performs an exact division, resulting in undefined behavior where
-// `x % y != 0` or `y == 0` or `x == T::min_value() && y == -1`.
+// `x % y != 0` or `y == 0` or `x == T::MIN && y == -1`.
 // First, check x % y != 0 (or if that computation overflows).
 let (res, overflow, _ty) = self.overflowing_binary_op(BinOp::Rem, a, b)?;
 if overflow || res.assert_bits(a.layout.size) != 0 {
-// Then, check if `b` is -1, which is the "min_value / -1" case.
+// Then, check if `b` is -1, which is the "MIN / -1" case.
 let minus1 = Scalar::from_int(-1, dest.layout.size);
 let b_scalar = b.to_scalar().unwrap();
 if b_scalar == minus1 {
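
The comment rewritten above spells out the preconditions for an exact division; here is a standalone sketch of the same checks on plain `i64` values (not the interpreter's scalar machinery):

```rust
/// Returns `Some(x / y)` only when the division is exact and cannot trap:
/// the remainder must be zero, `y` must be nonzero, and `i64::MIN / -1`
/// (which overflows) is rejected, mirroring the cases listed in the comment.
fn checked_exact_div(x: i64, y: i64) -> Option<i64> {
    if y == 0 || (x == i64::MIN && y == -1) {
        return None;
    }
    if x % y != 0 {
        return None;
    }
    Some(x / y)
}

fn main() {
    assert_eq!(checked_exact_div(12, 3), Some(4));
    assert_eq!(checked_exact_div(13, 3), None);
    assert_eq!(checked_exact_div(5, 0), None);
    assert_eq!(checked_exact_div(i64::MIN, -1), None);
}
```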

View file

@@ -565,7 +565,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
 // # Function pointers
 // (both global from `alloc_map` and local from `extra_fn_ptr_map`)
-if let Some(_) = self.get_fn_alloc(id) {
+if self.get_fn_alloc(id).is_some() {
 return if let AllocCheck::Dereferenceable = liveness {
 // The caller requested no function pointers.
 throw_unsup!(DerefFunctionPointer)

View file

@@ -311,9 +311,8 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
 // taking into account the `spread_arg`. If we could write
 // this is a single iterator (that handles `spread_arg`), then
 // `pass_argument` would be the loop body. It takes care to
 // not advance `caller_iter` for ZSTs.
-let mut locals_iter = body.args_iter();
-while let Some(local) = locals_iter.next() {
+for local in body.args_iter() {
 let dest = self.eval_place(&mir::Place::from(local))?;
 if Some(local) == body.spread_arg {
 // Must be a tuple

View file

@@ -463,7 +463,7 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, 'tcx, M
 let (lo, hi) = valid_range.clone().into_inner();
 // Determine the allowed range
 // `max_hi` is as big as the size fits
-let max_hi = u128::max_value() >> (128 - op.layout.size.bits());
+let max_hi = u128::MAX >> (128 - op.layout.size.bits());
 assert!(hi <= max_hi);
 // We could also write `(hi + 1) % (max_hi + 1) == lo` but `max_hi + 1` overflows for `u128`
 if (lo == 0 && hi == max_hi) || (hi + 1 == lo) {
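
The last line of this hunk encodes a wraparound-aware "covers every value" test; a minimal sketch of the idea follows (it uses `wrapping_add` so the standalone version cannot overflow in debug builds, a small deviation from the diff):

```rust
/// A modular range `lo..=hi` over the domain `0..=max_hi` covers everything
/// either when it is literally `0..=max_hi`, or when it wraps all the way
/// around so that `hi + 1 == lo`. Writing `(hi + 1) % (max_hi + 1) == lo`
/// would say the same thing, but `max_hi + 1` overflows when `max_hi` is
/// `u128::MAX`, which is why the original code avoids it.
fn covers_everything(lo: u128, hi: u128, max_hi: u128) -> bool {
    (lo == 0 && hi == max_hi) || hi.wrapping_add(1) == lo
}

fn main() {
    let max_hi = u128::MAX >> (128 - 8); // an 8-bit domain: 0..=255
    assert!(covers_everything(0, max_hi, max_hi)); // plain full range
    assert!(covers_everything(10, 9, max_hi));     // wrapping range 10..=9
    assert!(!covers_everything(0, 100, max_hi));   // proper subset
}
```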

View file

@@ -920,7 +920,7 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> {
 let (blocks, local_decls) = self.source.basic_blocks_and_local_decls_mut();
 match candidate {
 Candidate::Ref(loc) => {
-let ref mut statement = blocks[loc.block].statements[loc.statement_index];
+let statement = &mut blocks[loc.block].statements[loc.statement_index];
 match statement.kind {
 StatementKind::Assign(box (
 _,
@@ -971,7 +971,7 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> {
 }
 }
 Candidate::Repeat(loc) => {
-let ref mut statement = blocks[loc.block].statements[loc.statement_index];
+let statement = &mut blocks[loc.block].statements[loc.statement_index];
 match statement.kind {
 StatementKind::Assign(box (_, Rvalue::Repeat(ref mut operand, _))) => {
 let ty = operand.ty(local_decls, self.tcx);

View file

@@ -2331,7 +2331,7 @@ fn specialize_one_pattern<'p, 'tcx>(
 PatKind::Binding { .. } | PatKind::Wild => Some(ctor_wild_subpatterns.iter().collect()),
 PatKind::Variant { adt_def, variant_index, ref subpatterns, .. } => {
-let ref variant = adt_def.variants[variant_index];
+let variant = &adt_def.variants[variant_index];
 let is_non_exhaustive = cx.is_foreign_non_exhaustive_variant(pat.ty, variant);
 Some(Variant(variant.def_id))
 .filter(|variant_constructor| variant_constructor == constructor)

View file

@@ -37,7 +37,7 @@ impl<'a> Parser<'a> {
 let inner_parse_policy = InnerAttributeParsePolicy::NotPermitted {
 reason: inner_error_reason,
 saw_doc_comment: just_parsed_doc_comment,
-prev_attr_sp: attrs.last().and_then(|a| Some(a.span)),
+prev_attr_sp: attrs.last().map(|a| a.span),
 };
 let attr = self.parse_attribute_with_inner_parse_policy(inner_parse_policy)?;
 attrs.push(attr);

View file

@@ -19,7 +19,7 @@ use rustc_span::{MultiSpan, Span, SpanSnippetError, DUMMY_SP};
 use log::{debug, trace};
 use std::mem;
-const TURBOFISH: &'static str = "use `::<...>` instead of `<...>` to specify type arguments";
+const TURBOFISH: &str = "use `::<...>` instead of `<...>` to specify type arguments";
 /// Creates a placeholder argument.
 pub(super) fn dummy_arg(ident: Ident) -> Param {

View file

@@ -196,7 +196,7 @@ fn no_main_err(tcx: TyCtxt<'_>, visitor: &EntryContext<'_, '_>) {
 // The file may be empty, which leads to the diagnostic machinery not emitting this
 // note. This is a relatively simple way to detect that case and emit a span-less
 // note instead.
-if let Ok(_) = tcx.sess.source_map().lookup_line(sp.lo()) {
+if tcx.sess.source_map().lookup_line(sp.lo()).is_ok() {
 err.set_span(sp);
 err.span_label(sp, &note);
 } else {

View file

@@ -1432,7 +1432,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
 }
 msg
 }
-ref s @ _ => bug!("unexpected import subclass {:?}", s),
+ref s => bug!("unexpected import subclass {:?}", s),
 };
 let mut err = this.session.struct_span_err(binding.span, &msg);

View file

@@ -1086,7 +1086,7 @@ impl<'tcx> LifetimeContext<'_, 'tcx> {
 for param in params {
 if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(param.span)
 {
-if snippet.starts_with("&") && !snippet.starts_with("&'") {
+if snippet.starts_with('&') && !snippet.starts_with("&'") {
 introduce_suggestion
 .push((param.span, format!("&'a {}", &snippet[1..])));
 } else if snippet.starts_with("&'_ ") {
@@ -1118,7 +1118,7 @@ impl<'tcx> LifetimeContext<'_, 'tcx> {
 (1, Some(name), Some("'_")) => {
 suggest_existing(err, name.to_string());
 }
-(1, Some(name), Some(snippet)) if !snippet.ends_with(">") => {
+(1, Some(name), Some(snippet)) if !snippet.ends_with('>') => {
 suggest_existing(err, format!("{}<{}>", snippet, name));
 }
 (0, _, Some("&")) => {
@@ -1127,7 +1127,7 @@ impl<'tcx> LifetimeContext<'_, 'tcx> {
 (0, _, Some("'_")) => {
 suggest_new(err, "'a");
 }
-(0, _, Some(snippet)) if !snippet.ends_with(">") => {
+(0, _, Some(snippet)) if !snippet.ends_with('>') => {
 suggest_new(err, &format!("{}<'a>", snippet));
 }
 _ => {

View file

@@ -1550,21 +1550,18 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
 let method_names = pcx.candidate_method_names();
 pcx.allow_similar_names = false;
-let applicable_close_candidates: Vec<ty::AssocItem> =
-method_names
-.iter()
-.filter_map(|&method_name| {
-pcx.reset();
-pcx.method_name = Some(method_name);
-pcx.assemble_inherent_candidates();
-pcx.assemble_extension_candidates_for_traits_in_scope(hir::DUMMY_HIR_ID)
-.map_or(None, |_| {
-pcx.pick_core()
-.and_then(|pick| pick.ok())
-.and_then(|pick| Some(pick.item))
-})
-})
-.collect();
+let applicable_close_candidates: Vec<ty::AssocItem> = method_names
+.iter()
+.filter_map(|&method_name| {
+pcx.reset();
+pcx.method_name = Some(method_name);
+pcx.assemble_inherent_candidates();
+pcx.assemble_extension_candidates_for_traits_in_scope(hir::DUMMY_HIR_ID)
+.map_or(None, |_| {
+pcx.pick_core().and_then(|pick| pick.ok()).map(|pick| pick.item)
+})
+})
+.collect();
 if applicable_close_candidates.is_empty() {
 Ok(None)

View file

@@ -737,8 +737,8 @@ impl ItemLikeVisitor<'tcx> for CheckItemTypesVisitor<'tcx> {
 }
 pub fn check_wf_new(tcx: TyCtxt<'_>) {
-let mut visit = wfcheck::CheckTypeWellFormedVisitor::new(tcx);
-tcx.hir().krate().par_visit_all_item_likes(&mut visit);
+let visit = wfcheck::CheckTypeWellFormedVisitor::new(tcx);
+tcx.hir().krate().par_visit_all_item_likes(&visit);
 }
 fn check_mod_item_types(tcx: TyCtxt<'_>, module_def_id: DefId) {

View file

@@ -151,7 +151,7 @@ crate fn placeholder_type_error(
 .unwrap_or(&"ParamName");
 let mut sugg: Vec<_> =
-placeholder_types.iter().map(|sp| (*sp, type_name.to_string())).collect();
+placeholder_types.iter().map(|sp| (*sp, (*type_name).to_string())).collect();
 if generics.is_empty() {
 sugg.push((span, format!("<{}>", type_name)));
 } else if let Some(arg) = generics.iter().find(|arg| match arg.name {
@@ -160,7 +160,7 @@ crate fn placeholder_type_error(
 }) {
 // Account for `_` already present in cases like `struct S<_>(_);` and suggest
 // `struct S<T>(T);` instead of `struct S<_, T>(T);`.
-sugg.push((arg.span, type_name.to_string()));
+sugg.push((arg.span, (*type_name).to_string()));
 } else {
 sugg.push((
 generics.iter().last().unwrap().span.shrink_to_hi(),

View file

@@ -50,7 +50,7 @@ pub use self::types::Type::*;
 pub use self::types::Visibility::{Inherited, Public};
 pub use self::types::*;
-const FN_OUTPUT_NAME: &'static str = "Output";
+const FN_OUTPUT_NAME: &str = "Output";
 pub trait Clean<T> {
 fn clean(&self, cx: &DocContext<'_>) -> T;

View file

@@ -121,9 +121,7 @@ pub fn external_generic_args(
 let args: Vec<_> = substs
 .iter()
 .filter_map(|kind| match kind.unpack() {
-GenericArgKind::Lifetime(lt) => {
-lt.clean(cx).and_then(|lt| Some(GenericArg::Lifetime(lt)))
-}
+GenericArgKind::Lifetime(lt) => lt.clean(cx).map(|lt| GenericArg::Lifetime(lt)),
 GenericArgKind::Type(_) if skip_self => {
 skip_self = false;
 None

View file

@@ -90,14 +90,14 @@ impl DocFS {
 let sender = self.errors.sender.clone().unwrap();
 rayon::spawn(move || match fs::write(&path, &contents) {
 Ok(_) => {
-sender
-.send(None)
-.expect(&format!("failed to send error on \"{}\"", path.display()));
+sender.send(None).unwrap_or_else(|_| {
+panic!("failed to send error on \"{}\"", path.display())
+});
 }
 Err(e) => {
-sender
-.send(Some(format!("\"{}\": {}", path.display(), e)))
-.expect(&format!("failed to send non-error on \"{}\"", path.display()));
+sender.send(Some(format!("\"{}\": {}", path.display(), e))).unwrap_or_else(
+|_| panic!("failed to send non-error on \"{}\"", path.display()),
+);
 }
 });
 Ok(())

View file

@@ -62,7 +62,7 @@ impl<'a> From<&'a clean::Item> for ItemType {
 fn from(item: &'a clean::Item) -> ItemType {
 let inner = match item.inner {
 clean::StrippedItem(box ref item) => item,
-ref inner @ _ => inner,
+ref inner => inner,
 };
 match *inner {
@@ -194,7 +194,7 @@ impl fmt::Display for ItemType {
 }
 }
-pub const NAMESPACE_TYPE: &'static str = "t";
-pub const NAMESPACE_VALUE: &'static str = "v";
-pub const NAMESPACE_MACRO: &'static str = "m";
-pub const NAMESPACE_KEYWORD: &'static str = "k";
+pub const NAMESPACE_TYPE: &str = "t";
+pub const NAMESPACE_VALUE: &str = "v";
+pub const NAMESPACE_MACRO: &str = "m";
+pub const NAMESPACE_KEYWORD: &str = "k";

View file

@@ -296,7 +296,7 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for CodeBlocks<'_, 'a, I> {
 ""
 }
 )),
-playground_button.as_ref().map(String::as_str),
+playground_button.as_deref(),
 Some((s1.as_str(), s2)),
 ));
 Some(Event::Html(s.into()))
@@ -315,7 +315,7 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for CodeBlocks<'_, 'a, I> {
 ""
 }
 )),
-playground_button.as_ref().map(String::as_str),
+playground_button.as_deref(),
 None,
 ));
 Some(Event::Html(s.into()))
@@ -869,12 +869,8 @@ pub fn plain_summary_line(md: &str) -> String {
 }
 }
 let mut s = String::with_capacity(md.len() * 3 / 2);
-let mut p = ParserWrapper { inner: Parser::new(md), is_in: 0, is_first: true };
-while let Some(t) = p.next() {
-if !t.is_empty() {
-s.push_str(&t);
-}
-}
+let p = ParserWrapper { inner: Parser::new(md), is_in: 0, is_first: true };
+p.into_iter().filter(|t| !t.is_empty()).for_each(|i| s.push_str(&i));
 s
 }

View file

@@ -2727,7 +2727,7 @@ fn naive_assoc_href(it: &clean::Item, link: AssocItemLink<'_>) -> String {
 let name = it.name.as_ref().unwrap();
 let ty = match it.type_() {
 Typedef | AssocType => AssocType,
-s @ _ => s,
+s => s,
 };
 let anchor = format!("#{}.{}", ty, name);
@@ -3150,7 +3150,7 @@ fn render_attribute(attr: &ast::MetaItem) -> Option<String> {
 }
 }
-const ATTRIBUTE_WHITELIST: &'static [Symbol] = &[
+const ATTRIBUTE_WHITELIST: &[Symbol] = &[
 sym::export_name,
 sym::lang,
 sym::link_section,
@@ -4610,7 +4610,7 @@ fn item_keyword(w: &mut Buffer, cx: &Context, it: &clean::Item) {
 document(w, cx, it)
 }
-crate const BASIC_KEYWORDS: &'static str = "rust, rustlang, rust-lang";
+crate const BASIC_KEYWORDS: &str = "rust, rustlang, rust-lang";
 fn make_item_keywords(it: &clean::Item) -> String {
 format!("{}, {}", BASIC_KEYWORDS, it.name.as_ref().unwrap())

View file

@@ -185,7 +185,7 @@ macro_rules! eprintln {
 /// builds or when debugging in release mode is significantly faster.
 ///
 /// Note that the macro is intended as a debugging tool and therefore you
-/// should avoid having uses of it in version control for longer periods.
+/// should avoid having uses of it in version control for long periods.
 /// Use cases involving debug output that should be added to version control
 /// are better served by macros such as [`debug!`] from the [`log`] crate.
 ///

View file

@@ -19,9 +19,9 @@ cfg_if::cfg_if! {
 if #[cfg(target_os = "fuchsia")] {
 // fuchsia doesn't have /dev/null
 } else if #[cfg(target_os = "redox")] {
-const DEV_NULL: &'static str = "null:\0";
+const DEV_NULL: &str = "null:\0";
 } else {
-const DEV_NULL: &'static str = "/dev/null\0";
+const DEV_NULL: &str = "/dev/null\0";
 }
 }

View file

@@ -96,7 +96,7 @@ use time::TestExecTime;
 // Process exit code to be used to indicate test failures.
 const ERROR_EXIT_CODE: i32 = 101;
-const SECONDARY_TEST_INVOKER_VAR: &'static str = "__RUST_TEST_INVOKE";
+const SECONDARY_TEST_INVOKER_VAR: &str = "__RUST_TEST_INVOKE";
 // The default console test runner. It accepts the command line
 // arguments and a vector of test_descs.
@@ -158,7 +158,7 @@ pub fn test_main_static_abort(tests: &[&TestDescAndFn]) {
 .filter(|test| test.desc.name.as_slice() == name)
 .map(make_owned_test)
 .next()
-.expect(&format!("couldn't find a test with the provided name '{}'", name));
+.unwrap_or_else(|| panic!("couldn't find a test with the provided name '{}'", name));
 let TestDescAndFn { desc, testfn } = test;
 let testfn = match testfn {
 StaticTestFn(f) => f,