don't destructure references just to reborrow
parent 35a99eef32
commit a108d55ce6

22 changed files with 41 additions and 42 deletions
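Every hunk below makes the same change: instead of destructuring a reference with a `&(..., ref x)` or `&ty::Foo(ref x)` pattern just to reborrow its fields, the patterns now rely on match ergonomics and bind the fields as references directly. A minimal standalone sketch of the two equivalent forms (hypothetical example, not taken from the changed files):

use std::collections::HashMap;

fn main() {
    let entries: Vec<(String, u32)> = vec![("a".into(), 1), ("b".into(), 2)];

    // Before: explicitly destructure the `&(String, u32)` reference and reborrow each field.
    let old_style: HashMap<&String, &u32> =
        entries.iter().map(|&(ref k, ref v)| (k, v)).collect();

    // After: match ergonomics bind `k` and `v` as references automatically.
    let new_style: HashMap<&String, &u32> =
        entries.iter().map(|(k, v)| (k, v)).collect();

    assert_eq!(old_style, new_style);
}

Both closures build identical maps; the second form is the one the diff switches to.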
@@ -425,7 +425,7 @@ fn thin_lto(
    info!("going for that thin, thin LTO");

    let green_modules: FxHashMap<_, _> =
-        cached_modules.iter().map(|&(_, ref wp)| (wp.cgu_name.clone(), wp.clone())).collect();
+        cached_modules.iter().map(|(_, wp)| (wp.cgu_name.clone(), wp.clone())).collect();

    let full_scope_len = modules.len() + serialized_modules.len() + cached_modules.len();
    let mut thin_buffers = Vec::with_capacity(modules.len());
@@ -722,7 +722,7 @@ fn link_natively<'a>(

    linker::disable_localization(&mut cmd);

-    for &(ref k, ref v) in sess.target.link_env.as_ref() {
+    for (k, v) in sess.target.link_env.as_ref() {
        cmd.env(k.as_ref(), v.as_ref());
    }
    for k in sess.target.link_env_remove.as_ref() {
@@ -108,7 +108,7 @@ pub fn get_linker<'a>(
    if sess.target.is_like_msvc {
        if let Some(ref tool) = msvc_tool {
            cmd.args(tool.args());
-            for &(ref k, ref v) in tool.env() {
+            for (k, v) in tool.env() {
                if k == "PATH" {
                    new_path.extend(env::split_paths(v));
                    msvc_changed_path = true;
@@ -332,7 +332,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                    Immediate::new_slice(ptr, length.eval_usize(*self.tcx, self.param_env), self);
                self.write_immediate(val, dest)
            }
-            (&ty::Dynamic(ref data_a, ..), &ty::Dynamic(ref data_b, ..)) => {
+            (ty::Dynamic(data_a, ..), ty::Dynamic(data_b, ..)) => {
                let val = self.read_immediate(src)?;
                if data_a.principal() == data_b.principal() {
                    // A NOP cast that doesn't actually change anything, should be allowed even with mismatching vtables.
@@ -468,7 +468,7 @@ fn check_nested_occurrences(
                // We check that the meta-variable is correctly used.
                check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
            }
-            (NestedMacroState::MacroName, &TokenTree::Delimited(_, ref del))
+            (NestedMacroState::MacroName, TokenTree::Delimited(_, del))
                if del.delim == Delimiter::Parenthesis =>
            {
                state = NestedMacroState::MacroNameParen;
@@ -483,7 +483,7 @@ fn check_nested_occurrences(
                    valid,
                );
            }
-            (NestedMacroState::MacroNameParen, &TokenTree::Delimited(_, ref del))
+            (NestedMacroState::MacroNameParen, TokenTree::Delimited(_, del))
                if del.delim == Delimiter::Brace =>
            {
                state = NestedMacroState::Empty;
@@ -792,7 +792,7 @@ impl<'tt> FirstSets<'tt> {
                TokenTree::Sequence(sp, ref seq_rep) => {
                    let subfirst_owned;
                    let subfirst = match self.first.get(&sp.entire()) {
-                        Some(&Some(ref subfirst)) => subfirst,
+                        Some(Some(subfirst)) => subfirst,
                        Some(&None) => {
                            subfirst_owned = self.first(&seq_rep.tts);
                            &subfirst_owned
@@ -1275,7 +1275,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        };

        match (&expected_ty.kind(), &checked_ty.kind()) {
-            (&ty::Int(ref exp), &ty::Int(ref found)) => {
+            (ty::Int(exp), ty::Int(found)) => {
                let (f2e_is_fallible, e2f_is_fallible) = match (exp.bit_width(), found.bit_width())
                {
                    (Some(exp), Some(found)) if exp < found => (true, false),
@@ -1288,7 +1288,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                suggest_to_change_suffix_or_into(err, f2e_is_fallible, e2f_is_fallible);
                true
            }
-            (&ty::Uint(ref exp), &ty::Uint(ref found)) => {
+            (ty::Uint(exp), ty::Uint(found)) => {
                let (f2e_is_fallible, e2f_is_fallible) = match (exp.bit_width(), found.bit_width())
                {
                    (Some(exp), Some(found)) if exp < found => (true, false),
@@ -1321,7 +1321,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                suggest_to_change_suffix_or_into(err, f2e_is_fallible, e2f_is_fallible);
                true
            }
-            (&ty::Float(ref exp), &ty::Float(ref found)) => {
+            (ty::Float(exp), ty::Float(found)) => {
                if found.bit_width() < exp.bit_width() {
                    suggest_to_change_suffix_or_into(err, false, true);
                } else if literal_is_ty_suffixed(expr) {
@@ -1357,7 +1357,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                }
                true
            }
-            (&ty::Float(ref exp), &ty::Uint(ref found)) => {
+            (ty::Float(exp), ty::Uint(found)) => {
                // if `found` is `None` (meaning found is `usize`), don't suggest `.into()`
                if exp.bit_width() > found.bit_width().unwrap_or(256) {
                    err.multipart_suggestion_verbose(
@@ -1386,7 +1386,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                }
                true
            }
-            (&ty::Float(ref exp), &ty::Int(ref found)) => {
+            (ty::Float(exp), ty::Int(found)) => {
                // if `found` is `None` (meaning found is `isize`), don't suggest `.into()`
                if exp.bit_width() > found.bit_width().unwrap_or(256) {
                    err.multipart_suggestion_verbose(
@@ -1874,7 +1874,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        // I don't use 'is_range_literal' because only double-sided, half-open ranges count.
        if let ExprKind::Struct(
            QPath::LangItem(LangItem::Range, ..),
-            &[ref range_start, ref range_end],
+            [range_start, range_end],
            _,
        ) = last_expr_field.expr.kind
            && let variant_field =
@@ -754,7 +754,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                    return true
                }
            }
-            &hir::FnRetTy::Return(ref ty) => {
+            hir::FnRetTy::Return(ty) => {
                // Only point to return type if the expected type is the return type, as if they
                // are not, the expectation must have been caused by something else.
                debug!("suggest_missing_return_type: return type {:?} node {:?}", ty, ty.kind);
@@ -249,7 +249,7 @@ fn dump_graph(query: &DepGraphQuery) {
        // dump a .txt file with just the edges:
        let txt_path = format!("{}.txt", path);
        let mut file = BufWriter::new(File::create(&txt_path).unwrap());
-        for &(ref source, ref target) in &edges {
+        for (source, target) in &edges {
            write!(file, "{:?} -> {:?}\n", source, target).unwrap();
        }
    }
@@ -200,7 +200,7 @@ impl<'tcx> ProjectionCache<'_, 'tcx> {
    pub fn complete(&mut self, key: ProjectionCacheKey<'tcx>, result: EvaluationResult) {
        let mut map = self.map();
        match map.get(&key) {
-            Some(&ProjectionCacheEntry::NormalizedTy { ref ty, complete: _ }) => {
+            Some(ProjectionCacheEntry::NormalizedTy { ty, complete: _ }) => {
                info!("ProjectionCacheEntry::complete({:?}) - completing {:?}", key, ty);
                let mut ty = ty.clone();
                if result.must_apply_considering_regions() {
@@ -438,18 +438,18 @@ impl LintStore {
                        return CheckLintNameResult::Tool(Ok(&lint_ids));
                    }
                },
-                Some(&Id(ref id)) => return CheckLintNameResult::Tool(Ok(slice::from_ref(id))),
+                Some(Id(id)) => return CheckLintNameResult::Tool(Ok(slice::from_ref(id))),
                // If the lint was registered as removed or renamed by the lint tool, we don't need
                // to treat tool_lints and rustc lints different and can use the code below.
                _ => {}
            }
        }
        match self.by_name.get(&complete_name) {
-            Some(&Renamed(ref new_name, _)) => CheckLintNameResult::Warning(
+            Some(Renamed(new_name, _)) => CheckLintNameResult::Warning(
                format!("lint `{}` has been renamed to `{}`", complete_name, new_name),
                Some(new_name.to_owned()),
            ),
-            Some(&Removed(ref reason)) => CheckLintNameResult::Warning(
+            Some(Removed(reason)) => CheckLintNameResult::Warning(
                format!("lint `{}` has been removed: {}", complete_name, reason),
                None,
            ),
@@ -470,7 +470,7 @@ impl LintStore {
                    CheckLintNameResult::Ok(&lint_ids)
                }
            },
-            Some(&Id(ref id)) => CheckLintNameResult::Ok(slice::from_ref(id)),
+            Some(Id(id)) => CheckLintNameResult::Ok(slice::from_ref(id)),
            Some(&Ignored) => CheckLintNameResult::Ok(&[]),
        }
    }
@@ -513,7 +513,7 @@ impl LintStore {
                    CheckLintNameResult::Tool(Err((Some(&lint_ids), complete_name)))
                }
            },
-            Some(&Id(ref id)) => {
+            Some(Id(id)) => {
                CheckLintNameResult::Tool(Err((Some(slice::from_ref(id)), complete_name)))
            }
            Some(other) => {
@@ -1279,7 +1279,7 @@ impl UnusedImportBraces {
    fn check_use_tree(&self, cx: &EarlyContext<'_>, use_tree: &ast::UseTree, item: &ast::Item) {
        if let ast::UseTreeKind::Nested(ref items) = use_tree.kind {
            // Recursively check nested UseTrees
-            for &(ref tree, _) in items {
+            for (tree, _) in items {
                self.check_use_tree(cx, tree, item);
            }

@@ -1849,7 +1849,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
        // the assumption that they are numbered 1 to n.
        // FIXME (#2166): This is not nearly enough to support correct versioning
        // but is enough to get transitive crate dependencies working.
-        self.lazy_array(deps.iter().map(|&(_, ref dep)| dep))
+        self.lazy_array(deps.iter().map(|(_, dep)| dep))
    }

    fn encode_lib_features(&mut self) -> LazyArray<(Symbol, Option<Symbol>)> {
@@ -1986,7 +1986,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
        self.lazy_array(
            exported_symbols
                .iter()
-                .filter(|&&(ref exported_symbol, _)| match *exported_symbol {
+                .filter(|&(exported_symbol, _)| match *exported_symbol {
                    ExportedSymbol::NoDefId(symbol_name) => symbol_name != metadata_symbol_name,
                    _ => true,
                })
@@ -235,7 +235,7 @@ impl<'tcx> Operand<'tcx> {
    {
        match self {
            &Operand::Copy(ref l) | &Operand::Move(ref l) => l.ty(local_decls, tcx).ty,
-            &Operand::Constant(ref c) => c.literal.ty(),
+            Operand::Constant(c) => c.literal.ty(),
        }
    }
}
@@ -95,7 +95,7 @@ impl FlagComputation {
                self.add_flags(TypeFlags::STILL_FURTHER_SPECIALIZABLE);
            }

-            &ty::Generator(_, ref substs, _) => {
+            ty::Generator(_, substs, _) => {
                let substs = substs.as_generator();
                let should_remove_further_specializable =
                    !self.flags.contains(TypeFlags::STILL_FURTHER_SPECIALIZABLE);
@@ -186,7 +186,7 @@ impl FlagComputation {

            &ty::Slice(tt) => self.add_ty(tt),

-            &ty::RawPtr(ref m) => {
+            ty::RawPtr(m) => {
                self.add_ty(m.ty);
            }

@@ -428,7 +428,7 @@ pub fn super_relate_tys<'tcx, R: TypeRelation<'tcx>>(
            Ok(a)
        }

-        (&ty::Param(ref a_p), &ty::Param(ref b_p)) if a_p.index == b_p.index => Ok(a),
+        (ty::Param(a_p), ty::Param(b_p)) if a_p.index == b_p.index => Ok(a),

        (ty::Placeholder(p1), ty::Placeholder(p2)) if p1 == p2 => Ok(a),

@@ -551,16 +551,15 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
            //
            // FIXME(#29623) we could use PatKind::Range to rule
            // things out here, in some cases.
-            (
-                &TestKind::SwitchInt { switch_ty: _, ref options },
-                &PatKind::Constant { ref value },
-            ) if is_switch_ty(match_pair.pattern.ty) => {
+            (TestKind::SwitchInt { switch_ty: _, options }, PatKind::Constant { value })
+                if is_switch_ty(match_pair.pattern.ty) =>
+            {
                let index = options.get_index_of(value).unwrap();
                self.candidate_without_match_pair(match_pair_index, candidate);
                Some(index)
            }

-            (&TestKind::SwitchInt { switch_ty: _, ref options }, &PatKind::Range(ref range)) => {
+            (TestKind::SwitchInt { switch_ty: _, options }, PatKind::Range(range)) => {
                let not_contained =
                    self.values_not_contained_in_range(&*range, options).unwrap_or(false);

@@ -578,7 +577,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {

            (
                &TestKind::Len { len: test_len, op: BinOp::Eq },
-                &PatKind::Slice { ref prefix, ref slice, ref suffix },
+                PatKind::Slice { prefix, slice, suffix },
            ) => {
                let pat_len = (prefix.len() + suffix.len()) as u64;
                match (test_len.cmp(&pat_len), slice) {
@@ -615,7 +614,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {

            (
                &TestKind::Len { len: test_len, op: BinOp::Ge },
-                &PatKind::Slice { ref prefix, ref slice, ref suffix },
+                PatKind::Slice { prefix, slice, suffix },
            ) => {
                // the test is `$actual_len >= test_len`
                let pat_len = (prefix.len() + suffix.len()) as u64;
@@ -651,7 +650,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                }
            }

-            (&TestKind::Range(ref test), &PatKind::Range(ref pat)) => {
+            (TestKind::Range(test), PatKind::Range(pat)) => {
                use std::cmp::Ordering::*;

                if test == pat {
@@ -678,7 +677,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                no_overlap
            }

-            (&TestKind::Range(ref range), &PatKind::Constant { value }) => {
+            (TestKind::Range(range), &PatKind::Constant { value }) => {
                if let Some(false) = self.const_range_contains(&*range, value) {
                    // `value` is not contained in the testing range,
                    // so `value` can be matched only if this test fails.
@@ -121,7 +121,7 @@ impl<'k> StatCollector<'k> {

    fn print(&self, title: &str, prefix: &str) {
        let mut nodes: Vec<_> = self.nodes.iter().collect();
-        nodes.sort_by_key(|&(_, ref node)| node.stats.count * node.stats.size);
+        nodes.sort_by_key(|(_, node)| node.stats.count * node.stats.size);

        let total_size = nodes.iter().map(|(_, node)| node.stats.count * node.stats.size).sum();

@@ -147,7 +147,7 @@ impl<'k> StatCollector<'k> {
            );
            if !node.subnodes.is_empty() {
                let mut subnodes: Vec<_> = node.subnodes.iter().collect();
-                subnodes.sort_by_key(|&(_, ref subnode)| subnode.count * subnode.size);
+                subnodes.sort_by_key(|(_, subnode)| subnode.count * subnode.size);

                for (label, subnode) in subnodes {
                    let size = subnode.count * subnode.size;
@@ -576,7 +576,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
                // Ensure there is at most one `self` in the list
                let self_spans = items
                    .iter()
-                    .filter_map(|&(ref use_tree, _)| {
+                    .filter_map(|(use_tree, _)| {
                        if let ast::UseTreeKind::Simple(..) = use_tree.kind {
                            if use_tree.ident().name == kw::SelfLower {
                                return Some(use_tree.span);
@@ -1323,7 +1323,7 @@ pub fn build_session(
    let warnings_allow = sopts
        .lint_opts
        .iter()
-        .rfind(|&&(ref key, _)| *key == "warnings")
+        .rfind(|&(key, _)| *key == "warnings")
        .map_or(false, |&(_, level)| level == lint::Allow);
    let cap_lints_allow = sopts.lint_cap.map_or(false, |cap| cap == lint::Allow);
    let can_emit_warnings = !(warnings_allow || cap_lints_allow);
@@ -226,7 +226,7 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
        let arg_length = arguments.len();
        let distinct = matches!(other, &[ArgKind::Tuple(..)]);
        match (arg_length, arguments.get(0)) {
-            (1, Some(&ArgKind::Tuple(_, ref fields))) => {
+            (1, Some(ArgKind::Tuple(_, fields))) => {
                format!("a single {}-tuple as argument", fields.len())
            }
            _ => format!(