[std::vec] Rename .last_opt() to .last(), drop the old .last() behavior
parent add8f9680e
commit aa66b91767
27 changed files with 89 additions and 112 deletions
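The rename leaves vectors with a single, Option-returning `last`: call sites that relied on the old failing `.last()` add `.unwrap()`, and call sites that used `.last_opt()` simply drop the `_opt` suffix. A minimal sketch of those two migration patterns, written in present-day Rust syntax rather than the pre-1.0 `~[T]`/`uint` forms that appear in the hunks below:

    fn main() {
        let segments = vec!["std", "vec", "last"];

        // Callers that know the vector is non-empty keep the old failing
        // behavior by unwrapping the Option (what most hunks below do):
        let tail = segments.last().unwrap();
        assert_eq!(*tail, "last");

        // Callers that used last_opt() keep matching on the Option and only
        // lose the `_opt` suffix:
        let empty: Vec<&str> = Vec::new();
        match empty.last() {
            Some(s) => println!("last segment: {}", s),
            None => println!("no segments"),
        }
    }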
@@ -385,7 +385,7 @@ impl Integer for BigUint {
 }

 let mut shift = 0;
-let mut n = *other.data.last();
+let mut n = *other.data.last().unwrap();
 while n < (1 << BigDigit::bits - 2) {
 n <<= 1;
 shift += 1;
@@ -434,7 +434,7 @@ impl Integer for BigUint {
 }

 let an = a.data.slice(a.data.len() - n, a.data.len());
-let bn = *b.data.last();
+let bn = *b.data.last().unwrap();
 let mut d = ~[];
 let mut carry = 0;
 for elt in an.rev_iter() {
@@ -798,7 +798,7 @@ impl BigUint {
 /// Determines the fewest bits necessary to express the `BigUint`.
 pub fn bits(&self) -> uint {
 if self.is_zero() { return 0; }
-let zeros = self.data.last().leading_zeros();
+let zeros = self.data.last().unwrap().leading_zeros();
 return self.data.len()*BigDigit::bits - (zeros as uint);
 }
 }
@@ -234,7 +234,7 @@ mod tests {
 sorted.sort();
 let mut heap = PriorityQueue::from_vec(data);
 while !heap.is_empty() {
-assert_eq!(heap.top(), sorted.last());
+assert_eq!(heap.top(), sorted.last().unwrap());
 assert_eq!(heap.pop(), sorted.pop());
 }
 }
@@ -179,17 +179,17 @@ impl Visitor<()> for Context {
 fn visit_mac(&mut self, macro: &ast::Mac, _: ()) {
 let ast::MacInvocTT(ref path, _, _) = macro.node;

-if path.segments.last().identifier == self.sess.ident_of("macro_rules") {
+if path.segments.last().unwrap().identifier == self.sess.ident_of("macro_rules") {
 self.gate_feature("macro_rules", path.span, "macro definitions are \
 not stable enough for use and are subject to change");
 }

-else if path.segments.last().identifier == self.sess.ident_of("asm") {
+else if path.segments.last().unwrap().identifier == self.sess.ident_of("asm") {
 self.gate_feature("asm", path.span, "inline assembly is not \
 stable enough for use and is subject to change");
 }

-else if path.segments.last().identifier == self.sess.ident_of("log_syntax") {
+else if path.segments.last().unwrap().identifier == self.sess.ident_of("log_syntax") {
 self.gate_feature("log_syntax", path.span, "`log_syntax!` is not \
 stable enough for use and is subject to change");
 }
@@ -445,7 +445,7 @@ impl<'a> GatherLoanCtxt<'a> {
 return;
 }

-let root_ub = { *self.repeating_ids.last() }; // FIXME(#5074)
+let root_ub = { *self.repeating_ids.last().unwrap() }; // FIXME(#5074)

 // Check that the lifetime of the borrow does not exceed
 // the lifetime of the data being borrowed.
@@ -495,7 +495,7 @@ impl CFGBuilder {
 label: Option<ast::Name>) -> LoopScope {
 match label {
 None => {
-return *self.loop_scopes.last();
+return *self.loop_scopes.last().unwrap();
 }

 Some(_) => {
@@ -1077,7 +1077,7 @@ fn check_pat_non_uppercase_statics(cx: &Context, p: &ast::Pat) {
 match (&p.node, def_map.get().find(&p.id)) {
 (&ast::PatIdent(_, ref path, _), Some(&ast::DefStatic(_, false))) => {
 // last identifier alone is right choice for this lint.
-let ident = path.segments.last().identifier;
+let ident = path.segments.last().unwrap().identifier;
 let s = cx.tcx.sess.str_of(ident);
 if s.chars().any(|c| c.is_lowercase()) {
 cx.span_lint(NonUppercasePatternStatics, path.span,
@@ -790,7 +790,7 @@ impl Liveness {

 pub fn last_loop_scope(&self) -> NodeId {
 let loop_scope = self.loop_scope.borrow();
-*loop_scope.get().last()
+*loop_scope.get().last().unwrap()
 }

 pub fn ln_str(&self, ln: LiveNode) -> ~str {
@@ -1593,7 +1593,7 @@ impl Liveness {
 } else {
 let ends_with_stmt = match body.expr {
 None if body.stmts.len() > 0 =>
-match body.stmts.last().node {
+match body.stmts.last().unwrap().node {
 StmtSemi(e, _) => {
 let t_stmt = ty::expr_ty(self.tcx, e);
 ty::get(t_stmt).sty == ty::get(t_ret).sty
@@ -1603,7 +1603,7 @@ impl Liveness {
 _ => false
 };
 if ends_with_stmt {
-let last_stmt = body.stmts.last();
+let last_stmt = body.stmts.last().unwrap();
 let span_semicolon = Span {
 lo: last_stmt.span.hi,
 hi: last_stmt.span.hi,
@@ -595,7 +595,7 @@ impl<'a> PrivacyVisitor<'a> {
 match *self.last_private_map.get(&path_id) {
 resolve::AllPublic => {},
 resolve::DependsOn(def) => {
-let name = token::ident_to_str(&path.segments.last()
+let name = token::ident_to_str(&path.segments.last().unwrap()
 .identifier);
 self.ensure_public(span, def, Some(origdid),
 format!("{} `{}`", tyname, name));
@@ -1476,7 +1476,7 @@ impl Resolver {
 match view_path.node {
 ViewPathSimple(binding, ref full_path, id) => {
 let source_ident =
-full_path.segments.last().identifier;
+full_path.segments.last().unwrap().identifier;
 let subclass = @SingleImport(binding,
 source_ident);
 self.build_import_directive(module_,
@@ -4303,7 +4303,7 @@ impl Resolver {

 // First, check to see whether the name is a primitive type.
 if path.segments.len() == 1 {
-let id = path.segments.last().identifier;
+let id = path.segments.last().unwrap().identifier;

 match self.primitive_type_table
 .primitive_types
@@ -4342,7 +4342,7 @@ impl Resolver {
 debug!("(resolving type) resolved `{}` to \
 type {:?}",
 self.session.str_of(path.segments
-.last()
+.last().unwrap()
 .identifier),
 def);
 result_def = Some(def);
@@ -4561,7 +4561,7 @@ impl Resolver {
 path.span,
 format!("`{}` is not an enum variant or constant",
 self.session.str_of(
-path.segments.last().identifier)))
+path.segments.last().unwrap().identifier)))
 }
 None => {
 self.resolve_error(path.span,
@@ -4592,7 +4592,7 @@ impl Resolver {
 format!("`{}` is not an enum variant, struct or const",
 self.session
 .str_of(path.segments
-.last()
+.last().unwrap()
 .identifier)));
 }
 None => {
@@ -4601,7 +4601,7 @@ impl Resolver {
 struct or const `{}`",
 self.session
 .str_of(path.segments
-.last()
+.last().unwrap()
 .identifier)));
 }
 }
@@ -4722,7 +4722,7 @@ impl Resolver {

 let unqualified_def =
 self.resolve_identifier(path.segments
-.last()
+.last().unwrap()
 .identifier,
 namespace,
 check_ribs,
@@ -4883,7 +4883,7 @@ impl Resolver {
 }
 }

-let ident = path.segments.last().identifier;
+let ident = path.segments.last().unwrap().identifier;
 let def = match self.resolve_definition_of_name_in_module(containing_module,
 ident,
 namespace) {
@@ -4952,7 +4952,7 @@ impl Resolver {
 }
 }

-let name = path.segments.last().identifier;
+let name = path.segments.last().unwrap().identifier;
 match self.resolve_definition_of_name_in_module(containing_module,
 name,
 namespace) {
@@ -575,7 +575,7 @@ fn enter_default<'r,'b>(
 // non guarded matches, and thus by exhaustiveness, we know that
 // we don't need any default cases. If the check *isn't* nonexhaustive
 // (because chk is Some), then we need the defaults anyways.
-let is_exhaustive = match matches.last_opt() {
+let is_exhaustive = match matches.last() {
 Some(m) if m.data.arm.guard.is_some() && chk.is_infallible() => true,
 _ => false
 };
@@ -913,7 +913,7 @@ fn get_options(bcx: &Block, m: &[Match], col: uint) -> ~[Opt] {
 // to not always merge conditions.
 fn add_veclen_to_set(set: &mut ~[Opt], i: uint,
 len: uint, vlo: VecLenOpt) {
-match set.last_opt() {
+match set.last() {
 // If the last condition in the list matches the one we want
 // to add, then extend its range. Otherwise, make a new
 // vec_len with a range just covering the new entry.
@@ -2014,7 +2014,7 @@ fn exported_name(ccx: &CrateContext, path: ast_map::Path,

 // Don't mangle
 _ if attr::contains_name(attrs, "no_mangle")
-=> path_elem_to_str(*path.last(), token::get_ident_interner()),
+=> path_elem_to_str(*path.last().unwrap(), token::get_ident_interner()),

 // Usual name mangling
 _ => mangle_exported_name(ccx, path, ty)
@@ -475,7 +475,7 @@ impl<'a> CleanupHelperMethods<'a> for FunctionContext<'a> {

 fn top_scope<R>(&self, f: |&CleanupScope<'a>| -> R) -> R {
 let scopes = self.scopes.borrow();
-f(scopes.get().last())
+f(scopes.get().last().unwrap())
 }

 fn trans_cleanups_to_exit_scope(&self,
@@ -568,7 +568,7 @@ impl<'a> CleanupHelperMethods<'a> for FunctionContext<'a> {
 // and this scope is that loop, then stop popping and set
 // `prev_llbb` to the appropriate exit block from the loop.
 popped_scopes.push(self.pop_scope());
-let scope = popped_scopes.last();
+let scope = popped_scopes.last().unwrap();
 match label {
 UnwindExit | ReturnExit => { }
 LoopExit(id, exit) => {
@@ -2036,7 +2036,7 @@ fn trait_metadata(cx: &CrateContext,
 // assigned the correct name, size, namespace, and source location. But it does not describe
 // the trait's methods.
 let path = ty::item_path(cx.tcx, def_id);
-let ident = path.last().ident();
+let ident = path.last().unwrap().ident();
 let name = ppaux::trait_store_to_str(cx.tcx, trait_store) +
 ppaux::mutability_to_str(mutability) +
 token::ident_to_str(&ident);
@@ -2361,7 +2361,7 @@ fn populate_scope_map(cx: &CrateContext,
 // Create a new lexical scope and push it onto the stack
 let loc = cx.sess.codemap.lookup_char_pos(scope_span.lo);
 let file_metadata = file_metadata(cx, loc.file.name);
-let parent_scope = scope_stack.last().scope_metadata;
+let parent_scope = scope_stack.last().unwrap().scope_metadata;

 let scope_metadata = unsafe {
 llvm::LLVMDIBuilderCreateLexicalBlock(
@@ -2377,11 +2377,11 @@ fn populate_scope_map(cx: &CrateContext,
 inner_walk(cx, scope_stack, scope_map);

 // pop artificial scopes
-while scope_stack.last().ident.is_some() {
+while scope_stack.last().unwrap().ident.is_some() {
 scope_stack.pop();
 }

-if scope_stack.last().scope_metadata != scope_metadata {
+if scope_stack.last().unwrap().scope_metadata != scope_metadata {
 cx.sess.span_bug(scope_span, "debuginfo: Inconsistency in scope management.");
 }
@@ -2392,11 +2392,12 @@ fn populate_scope_map(cx: &CrateContext,
 block: &ast::Block,
 scope_stack: &mut ~[ScopeStackEntry],
 scope_map: &mut HashMap<ast::NodeId, DIScope>) {
-scope_map.insert(block.id, scope_stack.last().scope_metadata);
+scope_map.insert(block.id, scope_stack.last().unwrap().scope_metadata);

 // The interesting things here are statements and the concluding expression.
 for statement in block.stmts.iter() {
-scope_map.insert(ast_util::stmt_id(*statement), scope_stack.last().scope_metadata);
+scope_map.insert(ast_util::stmt_id(*statement),
+scope_stack.last().unwrap().scope_metadata);

 match statement.node {
 ast::StmtDecl(decl, _) => walk_decl(cx, decl, scope_stack, scope_map),
@@ -2417,7 +2418,7 @@ fn populate_scope_map(cx: &CrateContext,
 scope_map: &mut HashMap<ast::NodeId, DIScope>) {
 match *decl {
 codemap::Spanned { node: ast::DeclLocal(local), .. } => {
-scope_map.insert(local.id, scope_stack.last().scope_metadata);
+scope_map.insert(local.id, scope_stack.last().unwrap().scope_metadata);

 walk_pattern(cx, local.pat, scope_stack, scope_map);

@@ -2477,7 +2478,7 @@ fn populate_scope_map(cx: &CrateContext,
 // Create a new lexical scope and push it onto the stack
 let loc = cx.sess.codemap.lookup_char_pos(pat.span.lo);
 let file_metadata = file_metadata(cx, loc.file.name);
-let parent_scope = scope_stack.last().scope_metadata;
+let parent_scope = scope_stack.last().unwrap().scope_metadata;

 let scope_metadata = unsafe {
 llvm::LLVMDIBuilderCreateLexicalBlock(
@@ -2495,7 +2496,7 @@ fn populate_scope_map(cx: &CrateContext,

 } else {
 // Push a new entry anyway so the name can be found
-let prev_metadata = scope_stack.last().scope_metadata;
+let prev_metadata = scope_stack.last().unwrap().scope_metadata;
 scope_stack.push(ScopeStackEntry {
 scope_metadata: prev_metadata,
 ident: Some(ident)
@@ -2503,7 +2504,7 @@ fn populate_scope_map(cx: &CrateContext,
 }
 }

-scope_map.insert(pat.id, scope_stack.last().scope_metadata);
+scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata);

 for &sub_pat in sub_pat_opt.iter() {
 walk_pattern(cx, sub_pat, scope_stack, scope_map);
@@ -2511,11 +2512,11 @@ fn populate_scope_map(cx: &CrateContext,
 }

 ast::PatWild | ast::PatWildMulti => {
-scope_map.insert(pat.id, scope_stack.last().scope_metadata);
+scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata);
 }

 ast::PatEnum(_, ref sub_pats_opt) => {
-scope_map.insert(pat.id, scope_stack.last().scope_metadata);
+scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata);

 for ref sub_pats in sub_pats_opt.iter() {
 for &p in sub_pats.iter() {
@@ -2525,7 +2526,7 @@ fn populate_scope_map(cx: &CrateContext,
 }

 ast::PatStruct(_, ref field_pats, _) => {
-scope_map.insert(pat.id, scope_stack.last().scope_metadata);
+scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata);

 for &ast::FieldPat { pat: sub_pat, .. } in field_pats.iter() {
 walk_pattern(cx, sub_pat, scope_stack, scope_map);
@@ -2533,7 +2534,7 @@ fn populate_scope_map(cx: &CrateContext,
 }

 ast::PatTup(ref sub_pats) => {
-scope_map.insert(pat.id, scope_stack.last().scope_metadata);
+scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata);

 for &sub_pat in sub_pats.iter() {
 walk_pattern(cx, sub_pat, scope_stack, scope_map);
@@ -2541,23 +2542,23 @@ fn populate_scope_map(cx: &CrateContext,
 }

 ast::PatUniq(sub_pat) | ast::PatRegion(sub_pat) => {
-scope_map.insert(pat.id, scope_stack.last().scope_metadata);
+scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata);
 walk_pattern(cx, sub_pat, scope_stack, scope_map);
 }

 ast::PatLit(exp) => {
-scope_map.insert(pat.id, scope_stack.last().scope_metadata);
+scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata);
 walk_expr(cx, exp, scope_stack, scope_map);
 }

 ast::PatRange(exp1, exp2) => {
-scope_map.insert(pat.id, scope_stack.last().scope_metadata);
+scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata);
 walk_expr(cx, exp1, scope_stack, scope_map);
 walk_expr(cx, exp2, scope_stack, scope_map);
 }

 ast::PatVec(ref front_sub_pats, ref middle_sub_pats, ref back_sub_pats) => {
-scope_map.insert(pat.id, scope_stack.last().scope_metadata);
+scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata);

 for &sub_pat in front_sub_pats.iter() {
 walk_pattern(cx, sub_pat, scope_stack, scope_map);
@@ -2579,7 +2580,7 @@ fn populate_scope_map(cx: &CrateContext,
 scope_stack: &mut ~[ScopeStackEntry],
 scope_map: &mut HashMap<ast::NodeId, DIScope>) {

-scope_map.insert(exp.id, scope_stack.last().scope_metadata);
+scope_map.insert(exp.id, scope_stack.last().unwrap().scope_metadata);

 match exp.node {
 ast::ExprLogLevel |
@@ -2606,14 +2607,14 @@ fn populate_scope_map(cx: &CrateContext,
 },

 ast::ExprUnary(node_id, _, sub_exp) => {
-scope_map.insert(node_id, scope_stack.last().scope_metadata);
+scope_map.insert(node_id, scope_stack.last().unwrap().scope_metadata);
 walk_expr(cx, sub_exp, scope_stack, scope_map);
 }

 ast::ExprAssignOp(node_id, _, lhs, rhs) |
 ast::ExprIndex(node_id, lhs, rhs) |
 ast::ExprBinary(node_id, _, lhs, rhs) => {
-scope_map.insert(node_id, scope_stack.last().scope_metadata);
+scope_map.insert(node_id, scope_stack.last().unwrap().scope_metadata);
 walk_expr(cx, lhs, scope_stack, scope_map);
 walk_expr(cx, rhs, scope_stack, scope_map);
 }
@@ -2720,7 +2721,7 @@ fn populate_scope_map(cx: &CrateContext,
 }

 ast::ExprMethodCall(node_id, receiver_exp, _, _, ref args, _) => {
-scope_map.insert(node_id, scope_stack.last().scope_metadata);
+scope_map.insert(node_id, scope_stack.last().unwrap().scope_metadata);
 walk_expr(cx, receiver_exp, scope_stack, scope_map);

 for arg_exp in args.iter() {
@@ -170,9 +170,9 @@ fn ast_path_substs<AC:AstConv,RS:RegionScope>(
 // region with the current anon region binding (in other words,
 // whatever & would get replaced with).
 let expected_num_region_params = decl_generics.region_param_defs.len();
-let supplied_num_region_params = path.segments.last().lifetimes.len();
+let supplied_num_region_params = path.segments.last().unwrap().lifetimes.len();
 let regions = if expected_num_region_params == supplied_num_region_params {
-path.segments.last().lifetimes.map(
+path.segments.last().unwrap().lifetimes.map(
 |l| ast_region_to_region(this.tcx(), l))
 } else {
 let anon_regions =
@@ -373,7 +373,7 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope>(
 }

 if (flags & NO_REGIONS) != 0u {
-if !path.segments.last().lifetimes.is_empty() {
+if !path.segments.last().unwrap().lifetimes.is_empty() {
 tcx.sess.span_err(
 path.span,
 "region parameters are not allowed on this type");
@@ -3827,9 +3827,9 @@ pub fn instantiate_path(fcx: @FnCtxt,
 // determine the region parameters, using the value given by the user
 // (if any) and otherwise using a fresh region variable
 let num_expected_regions = tpt.generics.region_param_defs.len();
-let num_supplied_regions = pth.segments.last().lifetimes.len();
+let num_supplied_regions = pth.segments.last().unwrap().lifetimes.len();
 let regions = if num_expected_regions == num_supplied_regions {
-pth.segments.last().lifetimes.map(
+pth.segments.last().unwrap().lifetimes.map(
 |l| ast_region_to_region(fcx.tcx(), l))
 } else {
 if num_supplied_regions != 0 {
@@ -164,7 +164,7 @@ fn path(w: &mut io::Writer, path: &clean::Path, print_all: bool,
 info: |&render::Cache| -> Option<(~[~str], &'static str)>) {
 // The generics will get written to both the title and link
 let mut generics = ~"";
-let last = path.segments.last();
+let last = path.segments.last().unwrap();
 if last.lifetimes.len() > 0 || last.types.len() > 0 {
 let mut counter = 0;
 generics.push_str("<");
@@ -230,13 +230,13 @@ fn path(w: &mut io::Writer, path: &clean::Path, print_all: bool,
 }
 match shortty {
 "mod" => {
-url.push_str(*fqp.last());
+url.push_str(*fqp.last().unwrap());
 url.push_str("/index.html");
 }
 _ => {
 url.push_str(shortty);
 url.push_str(".");
-url.push_str(*fqp.last());
+url.push_str(*fqp.last().unwrap());
 url.push_str(".html");
 }
 }
@@ -457,7 +457,7 @@ impl fmt::Default for clean::ViewPath {
 fn fmt(v: &clean::ViewPath, f: &mut fmt::Formatter) {
 match *v {
 clean::SimpleImport(ref name, ref src) => {
-if *name == src.path.segments.last().name {
+if *name == src.path.segments.last().unwrap().name {
 write!(f.buf, "use {};", *src);
 } else {
 write!(f.buf, "use {} = {};", *name, *src);
@@ -275,7 +275,7 @@ pub fn run(mut crate: clean::Crate, dst: Path) {
 for (i, (&id, &(ref fqp, short))) in cache.paths.iter().enumerate() {
 if i > 0 { write!(w, ","); }
 write!(w, "'{}':\\{type:'{}',name:'{}'\\}",
-id, short, *fqp.last());
+id, short, *fqp.last().unwrap());
 }
 write!(w, "\\};");
 w.flush();
@@ -522,7 +522,7 @@ impl DocFolder for Cache {
 clean::TyMethodItem(..) |
 clean::StructFieldItem(..) |
 clean::VariantItem(..) => {
-Some((Some(*self.parent_stack.last()),
+Some((Some(*self.parent_stack.last().unwrap()),
 self.stack.slice_to(self.stack.len() - 1)))

 }
@@ -530,7 +530,7 @@ impl DocFolder for Cache {
 if self.parent_stack.len() == 0 {
 None
 } else {
-let last = self.parent_stack.last();
+let last = self.parent_stack.last().unwrap();
 let amt = match self.paths.find(last) {
 Some(&(_, "trait")) => self.stack.len() - 1,
 Some(..) | None => self.stack.len(),
@@ -952,10 +952,8 @@ pub trait ImmutableVector<'a, T> {
 fn init(&self) -> &'a [T];
 /// Returns all but the last `n' elements of a vector
 fn initn(&self, n: uint) -> &'a [T];
-/// Returns the last element of a vector, failing if the vector is empty.
-fn last(&self) -> &'a T;
 /// Returns the last element of a vector, or `None` if it is empty.
-fn last_opt(&self) -> Option<&'a T>;
+fn last(&self) -> Option<&'a T>;
 /**
 * Apply a function to each element of a vector and return a concatenation
 * of each result vector
@@ -1142,13 +1140,7 @@ impl<'a,T> ImmutableVector<'a, T> for &'a [T] {
 }

 #[inline]
-fn last(&self) -> &'a T {
-if self.len() == 0 { fail!("last: empty vector") }
-&self[self.len() - 1]
-}
-
-#[inline]
-fn last_opt(&self) -> Option<&'a T> {
+fn last(&self) -> Option<&'a T> {
 if self.len() == 0 { None } else { Some(&self[self.len() - 1]) }
 }

@@ -3116,27 +3108,12 @@ mod tests {

 #[test]
-fn test_last() {
-let mut a = ~[11];
-assert_eq!(a.last(), &11);
-a = ~[11, 12];
-assert_eq!(a.last(), &12);
-}
-
-#[test]
-#[should_fail]
-fn test_last_empty() {
-let a: ~[int] = ~[];
-a.last();
-}
-
-#[test]
 fn test_last_opt() {
 let mut a = ~[];
-assert_eq!(a.last_opt(), None);
+assert_eq!(a.last(), None);
 a = ~[11];
-assert_eq!(a.last_opt().unwrap(), &11);
+assert_eq!(a.last().unwrap(), &11);
 a = ~[11, 12];
-assert_eq!(a.last_opt().unwrap(), &12);
+assert_eq!(a.last().unwrap(), &12);
 }

 #[test]
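The `std::vec` hunks above carry the actual API change: `ImmutableVector::last` now returns `Option<&'a T>` and the separate `last_opt` method is gone, so the fail-on-empty behavior becomes a one-line `.unwrap()` at the call site instead of a second method. A rough sketch of the same shape as an extension trait in current Rust; the `SliceLast` and `last_ref` names are illustrative only, not part of the commit:

    // Illustrative extension trait mirroring the Option-returning `last` above.
    trait SliceLast<T> {
        fn last_ref(&self) -> Option<&T>;
    }

    impl<T> SliceLast<T> for [T] {
        fn last_ref(&self) -> Option<&T> {
            // Same body as the new impl in the diff: None for an empty slice,
            // otherwise a reference to the final element.
            if self.is_empty() { None } else { Some(&self[self.len() - 1]) }
        }
    }

    fn main() {
        let v = vec![11, 12];
        // The removed failing behavior is recovered with unwrap():
        assert_eq!(*v.last_ref().unwrap(), 12);
        let empty: Vec<i32> = Vec::new();
        assert_eq!(empty.last_ref(), None);
    }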
@@ -106,7 +106,7 @@ fn pretty_ty(ty: &Ty, itr: @IdentInterner, out: &mut ~str) {
 // need custom handling.
 TyNil => { out.push_str("$NIL$"); return }
 TyPath(ref path, _, _) => {
-out.push_str(itr.get(path.segments.last().identifier.name));
+out.push_str(itr.get(path.segments.last().unwrap().identifier.name));
 return
 }
 TyTup(ref tys) => {
@@ -139,7 +139,7 @@ pub fn impl_pretty_name(trait_ref: &Option<TraitRef>, ty: &Ty) -> PathElem {
 match *trait_ref {
 None => pretty = ~"",
 Some(ref trait_ref) => {
-pretty = itr.get(trait_ref.path.segments.last().identifier.name).to_owned();
+pretty = itr.get(trait_ref.path.segments.last().unwrap().identifier.name).to_owned();
 pretty.push_char('$');
 }
 };
@@ -31,7 +31,7 @@ pub fn path_name_i(idents: &[Ident]) -> ~str {
 // totally scary function: ignores all but the last element, should have
 // a different name
 pub fn path_to_ident(path: &Path) -> Ident {
-path.segments.last().identifier
+path.segments.last().unwrap().identifier
 }

 pub fn local_def(id: NodeId) -> DefId {
@@ -913,7 +913,7 @@ pub fn xorPush(marks: &mut ~[Mrk], mark: Mrk) {
 // get the last element of a mutable array.
 // FIXME #4903: , must be a separate procedure for now.
 pub fn getLast(arr: &~[Mrk]) -> Mrk {
-*arr.last()
+*arr.last().unwrap()
 }

 // are two paths equal when compared unhygienically?
@@ -269,12 +269,9 @@ impl CodeMap {

 pub fn new_filemap(&self, filename: FileName, src: @str) -> @FileMap {
 let mut files = self.files.borrow_mut();
-let start_pos = if files.get().len() == 0 {
-0
-} else {
-let last_start = files.get().last().start_pos.to_uint();
-let last_len = files.get().last().src.len();
-last_start + last_len
+let start_pos = match files.get().last() {
+None => 0,
+Some(last) => last.start_pos.to_uint() + last.src.len(),
 };

 let filemap = @FileMap {
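The CodeMap hunk above is a good illustration of the payoff: the `len() == 0` guard plus follow-up indexing collapses into a single match on `last()`. The same idiom in current Rust, as a minimal sketch with FileMap reduced to the two fields that matter here (field types simplified):

    struct FileMap { start_pos: usize, src: String }

    // One match on last() replaces the old `if files.len() == 0 { 0 } else { ... }` guard.
    fn next_start_pos(files: &[FileMap]) -> usize {
        match files.last() {
            None => 0,
            Some(last) => last.start_pos + last.src.len(),
        }
    }

    fn main() {
        let files = vec![FileMap { start_pos: 0, src: String::from("fn main() {}") }];
        assert_eq!(next_start_pos(&files), 12);
        assert_eq!(next_start_pos(&[]), 0);
    }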
@@ -920,7 +920,7 @@ enum StructType {
 // general helper methods.
 impl<'a> TraitDef<'a> {
 fn set_expn_info(&self, mut to_set: Span) -> Span {
-let trait_name = match self.path.path.last_opt() {
+let trait_name = match self.path.path.last() {
 None => self.cx.span_bug(self.span, "trait with empty path in generic `deriving`"),
 Some(name) => *name
 };
@@ -190,7 +190,8 @@ pub fn tt_next_token(r: &TtReader) -> TokenAndSpan {
 if !r.stack.get().dotdotdoted || {
 let repeat_idx = r.repeat_idx.borrow();
 let repeat_len = r.repeat_len.borrow();
-*repeat_idx.get().last() == *repeat_len.get().last() - 1
+*repeat_idx.get().last().unwrap() ==
+*repeat_len.get().last().unwrap() - 1
 } {

 match r.stack.get().up {
@@ -55,10 +55,10 @@ impl<T> OptVec<T> {
 }
 }

-pub fn last<'a>(&'a self) -> &'a T {
+pub fn last<'a>(&'a self) -> Option<&'a T> {
 match *self {
 Vec(ref v) => v.last(),
-Empty => fail!("last on empty opt_vec")
+Empty => None
 }
 }
@@ -1753,19 +1753,19 @@ impl Parser {
 return self.mk_expr(lo, hi, ExprLit(lit));
 }
 let mut es = ~[self.parse_expr()];
-self.commit_expr(*es.last(), &[], &[token::COMMA, token::RPAREN]);
+self.commit_expr(*es.last().unwrap(), &[], &[token::COMMA, token::RPAREN]);
 while self.token == token::COMMA {
 self.bump();
 if self.token != token::RPAREN {
 es.push(self.parse_expr());
-self.commit_expr(*es.last(), &[], &[token::COMMA, token::RPAREN]);
+self.commit_expr(*es.last().unwrap(), &[], &[token::COMMA, token::RPAREN]);
 }
 else {
 trailing_comma = true;
 }
 }
 hi = self.span.hi;
-self.commit_expr_expecting(*es.last(), token::RPAREN);
+self.commit_expr_expecting(*es.last().unwrap(), token::RPAREN);

 return if es.len() == 1 && !trailing_comma {
 self.mk_expr(lo, self.span.hi, ExprParen(es[0]))
@@ -1924,7 +1924,8 @@ impl Parser {

 fields.push(self.parse_field());
 while self.token != token::RBRACE {
-self.commit_expr(fields.last().expr, &[token::COMMA], &[token::RBRACE]);
+self.commit_expr(fields.last().unwrap().expr,
+&[token::COMMA], &[token::RBRACE]);

 if self.eat(&token::DOTDOT) {
 base = Some(self.parse_expr());
@@ -1939,7 +1940,7 @@ impl Parser {
 }

 hi = pth.span.hi;
-self.commit_expr_expecting(fields.last().expr, token::RBRACE);
+self.commit_expr_expecting(fields.last().unwrap().expr, token::RBRACE);
 ex = ExprStruct(pth, fields, base);
 return self.mk_expr(lo, hi, ex);
 }
@@ -2092,7 +2093,7 @@ impl Parser {
 // This is a conservative error: only report the last unclosed delimiter. The
 // previous unclosed delimiters could actually be closed! The parser just hasn't
 // gotten to them yet.
-match p.open_braces.last_opt() {
+match p.open_braces.last() {
 None => {}
 Some(&sp) => p.span_note(sp, "unclosed delimiter"),
 };
@@ -1947,7 +1947,7 @@ pub fn print_view_path(s: &mut State, vp: &ast::ViewPath) {
 match vp.node {
 ast::ViewPathSimple(ident, ref path, _) => {
 // FIXME(#6993) can't compare identifiers directly here
-if path.segments.last().identifier.name != ident.name {
+if path.segments.last().unwrap().identifier.name != ident.name {
 print_ident(s, ident);
 space(&mut s.s);
 word_space(s, "=");
@@ -91,7 +91,7 @@ fn recurse_or_fail(depth: int, st: Option<State>) {
 unique: ~Cons((), @*st.unique),
 tuple: (@Cons((), st.tuple.first()),
 ~Cons((), @*st.tuple.second())),
-vec: st.vec + &[@Cons((), *st.vec.last())],
+vec: st.vec + &[@Cons((), *st.vec.last().unwrap())],
 res: r(@Cons((), st.res._l))
 }
 }