
librustc: remove unnecessary as_slice() calls

This commit is contained in:
Jorge Aparicio 2014-11-27 13:53:34 -05:00
parent ae555a99a6
commit 00f3c3f7a7
15 changed files with 38 additions and 40 deletions
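The pattern behind every hunk below is the same: String exposes str methods and Vec<T> exposes [T] methods through deref, and the standard library provides cross-type PartialEq impls, so comparisons and slice methods resolve on the owned types without an explicit conversion. A minimal sketch of the idea in present-day Rust (illustrative values only; the 2014 trait plumbing this commit relies on differed slightly):

// Sketch only: shows why explicit slice conversions are redundant.
fn main() {
    // String derefs to str, so &str methods work without as_slice().
    let buf = String::from("crate_name");
    assert!(!buf.is_empty());
    assert!(buf.contains("_"));

    // Cross-type PartialEq impls allow String vs &str comparison directly.
    assert!(buf != "'");
    assert!(buf == "crate_name");

    // Vec<T> derefs to [T], so slice methods like contains() and get()
    // work without an explicit as_slice() call.
    let ordering = vec![1u32, 2, 3];
    assert!(ordering.contains(&2));
    assert_eq!(ordering.get(0), Some(&1u32));
}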


@@ -902,7 +902,7 @@ impl NonSnakeCase {
             let mut buf = String::new();
             if s.is_empty() { continue; }
             for ch in s.chars() {
-                if !buf.is_empty() && buf.as_slice() != "'"
+                if !buf.is_empty() && buf != "'"
                     && ch.is_uppercase()
                     && !last_upper {
                     words.push(buf);


@@ -277,7 +277,7 @@ fn visit_item(e: &Env, i: &ast::Item) {
 fn register_native_lib(sess: &Session, span: Option<Span>, name: String,
                        kind: cstore::NativeLibaryKind) {
-    if name.as_slice().is_empty() {
+    if name.is_empty() {
         match span {
             Some(span) => {
                 sess.span_err(span, "#[link(name = \"\")] given with \
@@ -304,7 +304,7 @@ fn existing_match(e: &Env, name: &str,
                   hash: Option<&Svh>) -> Option<ast::CrateNum> {
     let mut ret = None;
     e.sess.cstore.iter_crate_data(|cnum, data| {
-        if data.name.as_slice() != name { return }
+        if data.name != name { return }
         match hash {
             Some(hash) if *hash == data.hash() => { ret = Some(cnum); return }


@@ -162,7 +162,7 @@ impl CStore {
         let mut ordering = Vec::new();
         fn visit(cstore: &CStore, cnum: ast::CrateNum,
                  ordering: &mut Vec<ast::CrateNum>) {
-            if ordering.as_slice().contains(&cnum) { return }
+            if ordering.contains(&cnum) { return }
             let meta = cstore.get_crate_data(cnum);
             for (_, &dep) in meta.cnum_map.iter() {
                 visit(cstore, dep, ordering);
@@ -173,7 +173,6 @@ impl CStore {
             visit(self, num, &mut ordering);
         }
         ordering.as_mut_slice().reverse();
-        let ordering = ordering.as_slice();
         let mut libs = self.used_crate_sources.borrow()
             .iter()
             .map(|src| (src.cnum, match prefer {


@@ -474,7 +474,7 @@ fn encode_reexported_static_methods(ecx: &EncodeContext,
             // encoded metadata for static methods relative to Bar,
             // but not yet for Foo.
             //
-            if path_differs || original_name.get() != exp.name.as_slice() {
+            if path_differs || original_name.get() != exp.name {
                 if !encode_reexported_static_base_methods(ecx, rbml_w, exp) {
                     if encode_reexported_static_trait_methods(ecx, rbml_w, exp) {
                         debug!("(encode reexported static methods) {} [trait]",


@@ -214,7 +214,7 @@ pub fn rust_path() -> Vec<Path> {
     let mut env_rust_path: Vec<Path> = match get_rust_path() {
         Some(env_path) => {
             let env_path_components =
-                env_path.as_slice().split_str(PATH_ENTRY_SEPARATOR);
+                env_path.split_str(PATH_ENTRY_SEPARATOR);
             env_path_components.map(|s| Path::new(s)).collect()
         }
         None => Vec::new()


@@ -545,7 +545,7 @@ impl<'a> Context<'a> {
     fn crate_matches(&mut self, crate_data: &[u8], libpath: &Path) -> bool {
         if self.should_match_name {
             match decoder::maybe_get_crate_name(crate_data) {
-                Some(ref name) if self.crate_name == name.as_slice() => {}
+                Some(ref name) if self.crate_name == *name => {}
                 _ => { info!("Rejecting via crate name"); return false }
             }
         }
@@ -560,7 +560,7 @@ impl<'a> Context<'a> {
             None => { debug!("triple not present"); return false }
             Some(t) => t,
         };
-        if triple.as_slice() != self.triple {
+        if triple != self.triple {
            info!("Rejecting via crate triple: expected {} got {}", self.triple, triple);
            self.rejected_via_triple.push(CrateMismatch {
                path: libpath.clone(),
@@ -743,7 +743,7 @@ fn get_metadata_section_imp(is_osx: bool, filename: &Path) -> Result<MetadataBlo
             let name = String::from_raw_buf_len(name_buf as *const u8,
                                                 name_len as uint);
             debug!("get_metadata_section: name {}", name);
-            if read_meta_section_name(is_osx).as_slice() == name.as_slice() {
+            if read_meta_section_name(is_osx) == name {
                 let cbuf = llvm::LLVMGetSectionContents(si.llsi);
                 let csz = llvm::LLVMGetSectionSize(si.llsi) as uint;
                 let cvbuf: *const u8 = cbuf as *const u8;


@@ -234,15 +234,15 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) {
     debug!("fragments 3 assigned: {}", path_lps(assigned.as_slice()));
     // Fourth, build the leftover from the moved, assigned, and parents.
-    for m in moved.as_slice().iter() {
+    for m in moved.iter() {
         let lp = this.path_loan_path(*m);
         add_fragment_siblings(this, tcx, &mut unmoved, lp, None);
     }
-    for a in assigned.as_slice().iter() {
+    for a in assigned.iter() {
         let lp = this.path_loan_path(*a);
         add_fragment_siblings(this, tcx, &mut unmoved, lp, None);
     }
-    for p in parents.as_slice().iter() {
+    for p in parents.iter() {
         let lp = this.path_loan_path(*p);
         add_fragment_siblings(this, tcx, &mut unmoved, lp, None);
     }


@@ -31,14 +31,14 @@ pub struct LabelledCFG<'a, 'ast: 'a> {
 fn replace_newline_with_backslash_l(s: String) -> String {
     // Replacing newlines with \\l causes each line to be left-aligned,
     // improving presentation of (long) pretty-printed expressions.
-    if s.as_slice().contains("\n") {
+    if s.contains("\n") {
         let mut s = s.replace("\n", "\\l");
         // Apparently left-alignment applies to the line that precedes
         // \l, not the line that follows; so, add \l at end of string
         // if not already present, ensuring last line gets left-aligned
         // as well.
         let mut last_two: Vec<_> =
-            s.as_slice().chars().rev().take(2).collect();
+            s.chars().rev().take(2).collect();
         last_two.reverse();
         if last_two != ['\\', 'l'] {
             s.push_str("\\l");


@@ -321,7 +321,7 @@ fn has_allow_dead_code_or_lang_attr(attrs: &[ast::Attribute]) -> bool {
     for attr in lint::gather_attrs(attrs).into_iter() {
         match attr {
             Ok((ref name, lint::Allow, _))
-                if name.get() == dead_code.as_slice() => return true,
+                if name.get() == dead_code => return true,
             _ => (),
         }
     }


@@ -1065,7 +1065,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
             // the same bindings, and we also consider the first pattern to be
             // the "authoritative" set of ids
             let arm_succ =
-                self.define_bindings_in_arm_pats(arm.pats.as_slice().head().map(|p| &**p),
+                self.define_bindings_in_arm_pats(arm.pats.head().map(|p| &**p),
                                                  guard_succ);
             self.merge_from_succ(ln, arm_succ, first_merge);
             first_merge = false;
@@ -1431,7 +1431,7 @@ fn check_arm(this: &mut Liveness, arm: &ast::Arm) {
     // only consider the first pattern; any later patterns must have
     // the same bindings, and we also consider the first pattern to be
     // the "authoritative" set of ids
-    this.arm_pats_bindings(arm.pats.as_slice().head().map(|p| &**p), |this, ln, var, sp, id| {
+    this.arm_pats_bindings(arm.pats.head().map(|p| &**p), |this, ln, var, sp, id| {
         this.warn_about_unused(sp, id, ln, var);
     });
     visit::walk_arm(this, arm);


@@ -1668,7 +1668,7 @@ impl<'a> Resolver<'a> {
         let module_path = match view_path.node {
             ViewPathSimple(_, ref full_path, _) => {
                 full_path.segments
-                    .as_slice().init()
+                    .init()
                     .iter().map(|ident| ident.identifier.name)
                     .collect()
             }
@@ -1739,7 +1739,7 @@ impl<'a> Resolver<'a> {
                         continue;
                     }
                 };
                let module_path = module_path.as_slice().init();
-                let module_path = module_path.as_slice().init();
+                let module_path = module_path.init();
                 (module_path.to_vec(), name)
             }
         };
@@ -3735,12 +3735,12 @@ impl<'a> Resolver<'a> {
                               .codemap()
                               .span_to_snippet((*imports)[index].span)
                               .unwrap();
-                if sn.as_slice().contains("::") {
+                if sn.contains("::") {
                     self.resolve_error((*imports)[index].span,
                                        "unresolved import");
                 } else {
                     let err = format!("unresolved import (maybe you meant `{}::*`?)",
-                                      sn.as_slice().slice(0, sn.len()));
+                                      sn.slice(0, sn.len()));
                     self.resolve_error((*imports)[index].span, err.as_slice());
                 }
             }
@@ -5748,7 +5748,7 @@ impl<'a> Resolver<'a> {
                 });
                 if method_scope && token::get_name(self.self_name).get()
-                                == wrong_name.as_slice() {
+                                == wrong_name {
                     self.resolve_error(
                         expr.span,
                         "`self` is not available \


@@ -2907,7 +2907,7 @@ pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents {
                     res = res | TC::ReachesFfiUnsafe;
                 }
-                match repr_hints.as_slice().get(0) {
+                match repr_hints.get(0) {
                     Some(h) => if !h.is_ffi_safe() {
                         res = res | TC::ReachesFfiUnsafe;
                     },
@@ -3566,23 +3566,23 @@ pub fn positional_element_ty<'tcx>(cx: &ctxt<'tcx>,
                                    variant: Option<ast::DefId>) -> Option<Ty<'tcx>> {
     match (&ty.sty, variant) {
-        (&ty_tup(ref v), None) => v.as_slice().get(i).map(|&t| t),
+        (&ty_tup(ref v), None) => v.get(i).map(|&t| t),
         (&ty_struct(def_id, ref substs), None) => lookup_struct_fields(cx, def_id)
-            .as_slice().get(i)
+            .get(i)
             .map(|&t|lookup_item_type(cx, t.id).ty.subst(cx, substs)),
         (&ty_enum(def_id, ref substs), Some(variant_def_id)) => {
             let variant_info = enum_variant_with_id(cx, def_id, variant_def_id);
-            variant_info.args.as_slice().get(i).map(|t|t.subst(cx, substs))
+            variant_info.args.get(i).map(|t|t.subst(cx, substs))
         }
         (&ty_enum(def_id, ref substs), None) => {
             assert!(enum_is_univariant(cx, def_id));
             let enum_variants = enum_variants(cx, def_id);
             let variant_info = &(*enum_variants)[0];
-            variant_info.args.as_slice().get(i).map(|t|t.subst(cx, substs))
+            variant_info.args.get(i).map(|t|t.subst(cx, substs))
         }
         _ => None


@@ -512,13 +512,13 @@ pub fn build_codegen_options(matches: &getopts::Matches) -> CodegenOptions
 {
     let mut cg = basic_codegen_options();
     for option in matches.opt_strs("C").into_iter() {
-        let mut iter = option.as_slice().splitn(1, '=');
+        let mut iter = option.splitn(1, '=');
         let key = iter.next().unwrap();
         let value = iter.next();
         let option_to_lookup = key.replace("-", "_");
         let mut found = false;
         for &(candidate, setter, opt_type_desc, _) in CG_OPTIONS.iter() {
-            if option_to_lookup.as_slice() != candidate { continue }
+            if option_to_lookup != candidate { continue }
             if !setter(&mut cg, value) {
                 match (value, opt_type_desc) {
                     (Some(..), None) => {
@@ -714,7 +714,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
     for &level in [lint::Allow, lint::Warn, lint::Deny, lint::Forbid].iter() {
         for lint_name in matches.opt_strs(level.as_str()).into_iter() {
-            if lint_name.as_slice() == "help" {
+            if lint_name == "help" {
                 describe_lints = true;
             } else {
                 lint_opts.push((lint_name.replace("-", "_").into_string(), level));
@@ -727,9 +727,8 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
     let debug_map = debugging_opts_map();
     for debug_flag in debug_flags.iter() {
         let mut this_bit = 0;
-        for tuple in debug_map.iter() {
-            let (name, bit) = match *tuple { (ref a, _, b) => (a, b) };
-            if *name == debug_flag.as_slice() {
+        for &(name, _, bit) in debug_map.iter() {
+            if name == *debug_flag {
                 this_bit = bit;
                 break;
             }
@@ -749,7 +748,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
     if !parse_only && !no_trans {
         let unparsed_output_types = matches.opt_strs("emit");
         for unparsed_output_type in unparsed_output_types.iter() {
-            for part in unparsed_output_type.as_slice().split(',') {
+            for part in unparsed_output_type.split(',') {
                 let output_type = match part.as_slice() {
                     "asm" => OutputTypeAssembly,
                     "ir" => OutputTypeLlvmAssembly,
@@ -824,7 +823,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
     }).collect();
     let libs = matches.opt_strs("l").into_iter().map(|s| {
-        let mut parts = s.as_slice().rsplitn(1, ':');
+        let mut parts = s.rsplitn(1, ':');
         let kind = parts.next().unwrap();
         let (name, kind) = match (parts.next(), kind) {
             (None, name) |
@@ -875,7 +874,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
     let mut externs = HashMap::new();
     for arg in matches.opt_strs("extern").iter() {
-        let mut parts = arg.as_slice().splitn(1, '=');
+        let mut parts = arg.splitn(1, '=');
         let name = match parts.next() {
             Some(s) => s,
             None => early_error("--extern value must not be empty"),
@@ -925,7 +924,7 @@ pub fn parse_crate_types_from_list(list_list: Vec<String>) -> Result<Vec<CrateTy
     let mut crate_types: Vec<CrateType> = Vec::new();
     for unparsed_crate_type in list_list.iter() {
-        for part in unparsed_crate_type.as_slice().split(',') {
+        for part in unparsed_crate_type.split(',') {
             let new_part = match part {
                 "lib" => default_lib_output(),
                 "rlib" => CrateTypeRlib,


@@ -257,7 +257,7 @@ pub fn build_session_(sopts: config::Options,
     let can_print_warnings = sopts.lint_opts
         .iter()
-        .filter(|&&(ref key, _)| key.as_slice() == "warnings")
+        .filter(|&&(ref key, _)| *key == "warnings")
         .map(|&(_, ref level)| *level != lint::Allow)
         .last()
         .unwrap_or(true);


@@ -538,7 +538,7 @@ pub fn parameterized<'tcx>(cx: &ctxt<'tcx>,
 pub fn ty_to_short_str<'tcx>(cx: &ctxt<'tcx>, typ: Ty<'tcx>) -> String {
     let mut s = typ.repr(cx).to_string();
     if s.len() >= 32u {
-        s = s.as_slice().slice(0u, 32u).to_string();
+        s = s.slice(0u, 32u).to_string();
     }
     return s;
 }