
option: rewrite the API to use composition

Daniel Micay 2013-09-20 02:08:47 -04:00
parent f647ccc79c
commit 6a90e80b62
80 changed files with 244 additions and 277 deletions
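
In short, the commit collapses Option's family of mode-specific combinators (map_move, map_mut, map_move_default, and_then_ref, ...) into a single by-value map/and_then/map_default, composed with the as_ref()/as_mut() adapters wherever only a borrow is wanted; the hunks below are the mechanical fallout across the tree. A minimal sketch of the calling pattern, written in present-day Rust syntax rather than the 2013 dialect (~ pointers, do blocks) that appears in the diff:

```rust
// Illustrative sketch only (modern Rust syntax, not the exact 2013 std API):
// one by-value `map` plus the `as_ref`/`as_mut` adapters replaces the old
// mode-specific combinators (`map_move`, `map_mut`, `and_then_ref`, ...).
fn demo(name: Option<String>) -> Option<usize> {
    let mut scores: Option<Vec<u32>> = Some(vec![1, 2, 3]);

    // was roughly: scores.map_mut(|v| v.push(4))
    scores.as_mut().map(|v| v.push(4));

    // was roughly: name.map(|s| s.len()) with a by-reference `map`
    let len = name.as_ref().map(|s| s.len());

    // was roughly: name.map_move(|s| s + "!"); plain `map` now consumes the Option
    let _shout: Option<String> = name.map(|s| s + "!");

    len
}
```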

View file

@@ -109,8 +109,8 @@ pub fn parse_config(args: ~[~str]) -> config {
 compile_lib_path: matches.opt_str("compile-lib-path").unwrap(),
 run_lib_path: matches.opt_str("run-lib-path").unwrap(),
 rustc_path: opt_path(matches, "rustc-path"),
-clang_path: matches.opt_str("clang-path").map_move(|s| Path(s)),
-llvm_bin_path: matches.opt_str("llvm-bin-path").map_move(|s| Path(s)),
+clang_path: matches.opt_str("clang-path").map(|s| Path(s)),
+llvm_bin_path: matches.opt_str("llvm-bin-path").map(|s| Path(s)),
 src_base: opt_path(matches, "src-base"),
 build_base: opt_path(matches, "build-base"),
 aux_base: opt_path(matches, "aux-base"),
@@ -123,10 +123,10 @@ pub fn parse_config(args: ~[~str]) -> config {
 } else {
 None
 },
-logfile: matches.opt_str("logfile").map_move(|s| Path(s)),
-save_metrics: matches.opt_str("save-metrics").map_move(|s| Path(s)),
+logfile: matches.opt_str("logfile").map(|s| Path(s)),
+save_metrics: matches.opt_str("save-metrics").map(|s| Path(s)),
 ratchet_metrics:
-matches.opt_str("ratchet-metrics").map_move(|s| Path(s)),
+matches.opt_str("ratchet-metrics").map(|s| Path(s)),
 ratchet_noise_percent:
 matches.opt_str("ratchet-noise-percent").and_then(|s| from_str::<f64>(s)),
 runtool: matches.opt_str("runtool"),

View file

@@ -114,7 +114,7 @@ impl<T: Send> GenericPort<T> for SyncPort<T> {
 }
 fn try_recv(&self) -> Option<T> {
-do self.duplex_stream.try_recv().map_move |val| {
+do self.duplex_stream.try_recv().map |val| {
 self.duplex_stream.try_send(());
 val
 }

View file

@@ -165,7 +165,7 @@ impl<T> DList<T> {
 /// Remove the first Node and return it, or None if the list is empty
 #[inline]
 fn pop_front_node(&mut self) -> Option<~Node<T>> {
-do self.list_head.take().map_move |mut front_node| {
+do self.list_head.take().map |mut front_node| {
 self.length -= 1;
 match front_node.next.take() {
 Some(node) => self.list_head = link_with_prev(node, Rawlink::none()),
@@ -191,7 +191,7 @@ impl<T> DList<T> {
 /// Remove the last Node and return it, or None if the list is empty
 #[inline]
 fn pop_back_node(&mut self) -> Option<~Node<T>> {
-do self.list_tail.resolve().map_move_default(None) |tail| {
+do self.list_tail.resolve().map_default(None) |tail| {
 self.length -= 1;
 self.list_tail = tail.prev;
 match tail.prev.resolve() {
@@ -206,25 +206,27 @@ impl<T> Deque<T> for DList<T> {
 /// Provide a reference to the front element, or None if the list is empty
 #[inline]
 fn front<'a>(&'a self) -> Option<&'a T> {
-self.list_head.map(|head| &head.value)
+self.list_head.as_ref().map(|head| &head.value)
 }
 /// Provide a mutable reference to the front element, or None if the list is empty
 #[inline]
 fn front_mut<'a>(&'a mut self) -> Option<&'a mut T> {
-self.list_head.map_mut(|head| &mut head.value)
+self.list_head.as_mut().map(|head| &mut head.value)
 }
 /// Provide a reference to the back element, or None if the list is empty
 #[inline]
 fn back<'a>(&'a self) -> Option<&'a T> {
-self.list_tail.resolve_immut().map(|tail| &tail.value)
+let tmp = self.list_tail.resolve_immut(); // FIXME: #3511: shouldn't need variable
+tmp.as_ref().map(|tail| &tail.value)
 }
 /// Provide a mutable reference to the back element, or None if the list is empty
 #[inline]
 fn back_mut<'a>(&'a mut self) -> Option<&'a mut T> {
-self.list_tail.resolve().map_mut(|tail| &mut tail.value)
+let mut tmp = self.list_tail.resolve(); // FIXME: #3511: shouldn't need variable
+tmp.as_mut().map(|tail| &mut tail.value)
 }
 /// Add an element first in the list
@@ -238,7 +240,7 @@ impl<T> Deque<T> for DList<T> {
 ///
 /// O(1)
 fn pop_front(&mut self) -> Option<T> {
-self.pop_front_node().map_move(|~Node{value, _}| value)
+self.pop_front_node().map(|~Node{value, _}| value)
 }
 /// Add an element last in the list
@@ -252,7 +254,7 @@ impl<T> Deque<T> for DList<T> {
 ///
 /// O(1)
 fn pop_back(&mut self) -> Option<T> {
-self.pop_back_node().map_move(|~Node{value, _}| value)
+self.pop_back_node().map(|~Node{value, _}| value)
 }
 }
@@ -268,7 +270,7 @@ impl<T> DList<T> {
 /// If the list is empty, do nothing.
 #[inline]
 pub fn rotate_forward(&mut self) {
-do self.pop_back_node().map_move |tail| {
+do self.pop_back_node().map |tail| {
 self.push_front_node(tail)
 };
 }
@@ -278,7 +280,7 @@ impl<T> DList<T> {
 /// If the list is empty, do nothing.
 #[inline]
 pub fn rotate_backward(&mut self) {
-do self.pop_front_node().map_move |head| {
+do self.pop_front_node().map |head| {
 self.push_back_node(head)
 };
 }
@@ -442,7 +444,7 @@ impl<'self, A> Iterator<&'self A> for DListIterator<'self, A> {
 if self.nelem == 0 {
 return None;
 }
-do self.head.map |head| {
+do self.head.as_ref().map |head| {
 self.nelem -= 1;
 self.head = &head.next;
 &head.value
@@ -461,7 +463,8 @@ impl<'self, A> DoubleEndedIterator<&'self A> for DListIterator<'self, A> {
 if self.nelem == 0 {
 return None;
 }
-do self.tail.resolve().map_move |prev| {
+let tmp = self.tail.resolve_immut(); // FIXME: #3511: shouldn't need variable
+do tmp.as_ref().map |prev| {
 self.nelem -= 1;
 self.tail = prev.prev;
 &prev.value
@@ -477,7 +480,7 @@ impl<'self, A> Iterator<&'self mut A> for MutDListIterator<'self, A> {
 if self.nelem == 0 {
 return None;
 }
-do self.head.resolve().map_move |next| {
+do self.head.resolve().map |next| {
 self.nelem -= 1;
 self.head = match next.next {
 Some(ref mut node) => Rawlink::some(&mut **node),
@@ -499,7 +502,7 @@ impl<'self, A> DoubleEndedIterator<&'self mut A> for MutDListIterator<'self, A>
 if self.nelem == 0 {
 return None;
 }
-do self.tail.resolve().map_move |prev| {
+do self.tail.resolve().map |prev| {
 self.nelem -= 1;
 self.tail = prev.prev;
 &mut prev.value
@@ -554,7 +557,7 @@ impl<'self, A> ListInsertion<A> for MutDListIterator<'self, A> {
 if self.nelem == 0 {
 return None
 }
-self.head.resolve().map_move(|head| &mut head.value)
+self.head.resolve().map(|head| &mut head.value)
 }
 }
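
The DList hunks above show the pattern applied to Option fields that are only borrowed: the accessors now write self.field.as_ref().map(...) (or as_mut() for the mutable case), and the "// FIXME: #3511" lines bind the intermediate Option returned by resolve() to a temporary as a workaround for a borrow-check limitation of the time. A rough, hypothetical reduction of the resulting accessor shape in modern syntax:

```rust
// Hypothetical reduction of the DList accessors: borrow the Option stored in
// the struct with `as_ref()`/`as_mut()`, then map to a reference into it.
struct Node { value: i32 }
struct List { head: Option<Box<Node>> }

impl List {
    fn front(&self) -> Option<&i32> {
        self.head.as_ref().map(|node| &node.value)
    }
    fn front_mut(&mut self) -> Option<&mut i32> {
        self.head.as_mut().map(|node| &mut node.value)
    }
}
```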

View file

@@ -596,8 +596,7 @@ mod test {
 |i| format!("tmp/lib-fileinput-test-next-file-{}.tmp", i)),true);
 for (i, filename) in filenames.iter().enumerate() {
-let contents =
-vec::from_fn(3, |j| format!("{} {}", i, j + 1));
+let contents = vec::from_fn(3, |j| format!("{} {}", i, j + 1));
 make_file(filename.get_ref(), contents);
 }

View file

@@ -698,7 +698,7 @@ impl BigUint {
 #[inline]
 pub fn new(v: ~[BigDigit]) -> BigUint {
 // omit trailing zeros
-let new_len = v.iter().rposition(|n| *n != 0).map_move_default(0, |p| p + 1);
+let new_len = v.iter().rposition(|n| *n != 0).map_default(0, |p| p + 1);
 if new_len == v.len() { return BigUint { data: v }; }
 let mut v = v;
@@ -1417,7 +1417,7 @@ impl BigInt {
 start = 1;
 }
 return BigUint::parse_bytes(buf.slice(start, buf.len()), radix)
-.map_move(|bu| BigInt::from_biguint(sign, bu));
+.map(|bu| BigInt::from_biguint(sign, bu));
 }
 /// Converts this `BigInt` into a `BigUint`, if it's not negative.
@@ -2507,7 +2507,7 @@ mod bigint_tests {
 #[test]
 fn test_from_str_radix() {
 fn check(s: &str, ans: Option<int>) {
-let ans = ans.map_move(|n| {
+let ans = ans.map(|n| {
 let x: BigInt = FromPrimitive::from_int(n).unwrap();
 x
 });

View file

@@ -158,7 +158,7 @@ impl<V> SmallIntMap<V> {
 {
 let values = replace(&mut self.v, ~[]);
 values.move_iter().enumerate().filter_map(|(i, v)| {
-v.map_move(|v| (i, v))
+v.map(|v| (i, v))
 })
 }
 }

View file

@@ -127,7 +127,7 @@ impl Terminal {
 let inf = ti.unwrap();
 let nc = if inf.strings.find_equiv(&("setaf")).is_some()
 && inf.strings.find_equiv(&("setab")).is_some() {
-inf.numbers.find_equiv(&("colors")).map_move_default(0, |&n| n)
+inf.numbers.find_equiv(&("colors")).map_default(0, |&n| n)
 } else { 0 };
 return Ok(Terminal {out: out, ti: inf, num_colors: nc});
@@ -220,7 +220,7 @@ impl Terminal {
 cap = self.ti.strings.find_equiv(&("op"));
 }
 }
-let s = do cap.map_move_default(Err(~"can't find terminfo capability `sgr0`")) |op| {
+let s = do cap.map_default(Err(~"can't find terminfo capability `sgr0`")) |op| {
 expand(*op, [], &mut Variables::new())
 };
 if s.is_ok() {

View file

@@ -241,20 +241,20 @@ pub fn parse_opts(args: &[~str]) -> Option<OptRes> {
 let run_ignored = matches.opt_present("ignored");
 let logfile = matches.opt_str("logfile");
-let logfile = logfile.map_move(|s| Path(s));
+let logfile = logfile.map(|s| Path(s));
 let run_benchmarks = matches.opt_present("bench");
 let run_tests = ! run_benchmarks ||
 matches.opt_present("test");
 let ratchet_metrics = matches.opt_str("ratchet-metrics");
-let ratchet_metrics = ratchet_metrics.map_move(|s| Path(s));
+let ratchet_metrics = ratchet_metrics.map(|s| Path(s));
 let ratchet_noise_percent = matches.opt_str("ratchet-noise-percent");
-let ratchet_noise_percent = ratchet_noise_percent.map_move(|s| from_str::<f64>(s).unwrap());
+let ratchet_noise_percent = ratchet_noise_percent.map(|s| from_str::<f64>(s).unwrap());
 let save_metrics = matches.opt_str("save-metrics");
-let save_metrics = save_metrics.map_move(|s| Path(s));
+let save_metrics = save_metrics.map(|s| Path(s));
 let test_shard = matches.opt_str("test-shard");
 let test_shard = opt_shard(test_shard);

View file

@@ -385,7 +385,7 @@ impl<'self, T> Iterator<&'self T> for TreeSetIterator<'self, T> {
 /// Advance the iterator to the next node (in order). If there are no more nodes, return `None`.
 #[inline]
 fn next(&mut self) -> Option<&'self T> {
-do self.iter.next().map_move |(value, _)| { value }
+do self.iter.next().map |(value, _)| { value }
 }
 }
@@ -393,7 +393,7 @@ impl<'self, T> Iterator<&'self T> for TreeSetRevIterator<'self, T> {
 /// Advance the iterator to the next node (in order). If there are no more nodes, return `None`.
 #[inline]
 fn next(&mut self) -> Option<&'self T> {
-do self.iter.next().map |&(value, _)| { value }
+do self.iter.next().map |(value, _)| { value }
 }
 }
@@ -686,7 +686,7 @@ fn mutate_values<'r, K: TotalOrd, V>(node: &'r mut Option<~TreeNode<K, V>>,
 // Remove left horizontal link by rotating right
 fn skew<K: TotalOrd, V>(node: &mut ~TreeNode<K, V>) {
-if node.left.map_default(false, |x| x.level == node.level) {
+if node.left.as_ref().map_default(false, |x| x.level == node.level) {
 let mut save = node.left.take_unwrap();
 swap(&mut node.left, &mut save.right); // save.right now None
 swap(node, &mut save);
@@ -697,8 +697,8 @@ fn skew<K: TotalOrd, V>(node: &mut ~TreeNode<K, V>) {
 // Remove dual horizontal link by rotating left and increasing level of
 // the parent
 fn split<K: TotalOrd, V>(node: &mut ~TreeNode<K, V>) {
-if node.right.map_default(false,
-|x| x.right.map_default(false, |y| y.level == node.level)) {
+if node.right.as_ref().map_default(false,
+|x| x.right.as_ref().map_default(false, |y| y.level == node.level)) {
 let mut save = node.right.take_unwrap();
 swap(&mut node.right, &mut save.left); // save.left now None
 save.level += 1;
@@ -804,8 +804,8 @@ fn remove<K: TotalOrd, V>(node: &mut Option<~TreeNode<K, V>>,
 };
 if rebalance {
-let left_level = save.left.map_default(0, |x| x.level);
-let right_level = save.right.map_default(0, |x| x.level);
+let left_level = save.left.as_ref().map_default(0, |x| x.level);
+let right_level = save.right.as_ref().map_default(0, |x| x.level);
 // re-balance, if necessary
 if left_level < save.level - 1 || right_level < save.level - 1 {

View file

@@ -128,7 +128,7 @@ fn rustdoc_help() {
 fn find_cmd(command_string: &str) -> Option<Command> {
 do COMMANDS.iter().find |command| {
 command.cmd == command_string
-}.map_move(|x| *x)
+}.map(|x| *x)
 }
 fn cmd_help(args: &[~str]) -> ValidUsage {

View file

@@ -719,7 +719,7 @@ pub fn build_session_options(binary: @str,
 } else if matches.opt_present("emit-llvm") {
 link::output_type_bitcode
 } else { link::output_type_exe };
-let sysroot_opt = matches.opt_str("sysroot").map_move(|m| @Path(m));
+let sysroot_opt = matches.opt_str("sysroot").map(|m| @Path(m));
 let target = matches.opt_str("target").unwrap_or(host_triple());
 let target_cpu = matches.opt_str("target-cpu").unwrap_or(~"generic");
 let target_feature = matches.opt_str("target-feature").unwrap_or(~"");

View file

@@ -72,7 +72,7 @@ fn fold_mod(cx: &Context, m: &ast::_mod) -> ast::_mod {
 filter_item(cx, *a).and_then(|x| cx.fold_item(x))
 }.collect();
 let filtered_view_items = do m.view_items.iter().filter_map |a| {
-do filter_view_item(cx, a).map_move |x| {
+do filter_view_item(cx, a).map |x| {
 cx.fold_view_item(x)
 }
 }.collect();
@@ -97,7 +97,7 @@ fn fold_foreign_mod(cx: &Context, nm: &ast::foreign_mod) -> ast::foreign_mod {
 .filter_map(|a| filter_foreign_item(cx, *a))
 .collect();
 let filtered_view_items = do nm.view_items.iter().filter_map |a| {
-do filter_view_item(cx, a).map_move |x| {
+do filter_view_item(cx, a).map |x| {
 cx.fold_view_item(x)
 }
 }.collect();
@@ -152,12 +152,12 @@ fn fold_block(cx: &Context, b: &ast::Block) -> ast::Block {
 filter_stmt(cx, *a).and_then(|stmt| cx.fold_stmt(stmt))
 }.collect();
 let filtered_view_items = do b.view_items.iter().filter_map |a| {
-filter_view_item(cx, a).map(|x| cx.fold_view_item(*x))
+filter_view_item(cx, a).map(|x| cx.fold_view_item(x))
 }.collect();
 ast::Block {
 view_items: filtered_view_items,
 stmts: resulting_stmts,
-expr: b.expr.map(|x| cx.fold_expr(*x)),
+expr: b.expr.map(|x| cx.fold_expr(x)),
 id: b.id,
 rules: b.rules,
 span: b.span,

View file

@@ -1786,7 +1786,7 @@ impl TypeNames {
 }
 pub fn find_type(&self, s: &str) -> Option<Type> {
-self.named_types.find_equiv(&s).map_move(|x| Type::from_ref(*x))
+self.named_types.find_equiv(&s).map(|x| Type::from_ref(*x))
 }
 // We have a depth count, because we seem to make infinite types.

View file

@@ -133,7 +133,7 @@ pub fn add_extern_mod_stmt_cnum(cstore: &mut CStore,
 pub fn find_extern_mod_stmt_cnum(cstore: &CStore,
 emod_id: ast::NodeId)
 -> Option<ast::CrateNum> {
-cstore.extern_mod_crate_map.find(&emod_id).map_move(|x| *x)
+cstore.extern_mod_crate_map.find(&emod_id).map(|x| *x)
 }
 #[deriving(Clone)]

View file

@@ -200,7 +200,7 @@ fn item_def_id(d: ebml::Doc, cdata: Cmd) -> ast::DefId {
 }
 fn get_provided_source(d: ebml::Doc, cdata: Cmd) -> Option<ast::DefId> {
-do reader::maybe_get_doc(d, tag_item_method_provided_source).map_move |doc| {
+do reader::maybe_get_doc(d, tag_item_method_provided_source).map |doc| {
 translate_def_id(cdata, reader::with_doc_data(doc, parse_def_id))
 }
 }
@@ -267,7 +267,7 @@ fn item_ty_param_defs(item: ebml::Doc, tcx: ty::ctxt, cdata: Cmd,
 }
 fn item_ty_region_param(item: ebml::Doc) -> Option<ty::region_variance> {
-do reader::maybe_get_doc(item, tag_region_param).map_move |doc| {
+do reader::maybe_get_doc(item, tag_region_param).map |doc| {
 let mut decoder = reader::Decoder(doc);
 Decodable::decode(&mut decoder)
 }
@@ -400,7 +400,7 @@ pub fn get_trait_def(cdata: Cmd,
 do reader::tagged_docs(item_doc, tag_item_super_trait_ref) |trait_doc| {
 // NB. Bypasses real supertraits. See get_supertraits() if you wanted them.
 let trait_ref = doc_trait_ref(trait_doc, tcx, cdata);
-do tcx.lang_items.to_builtin_kind(trait_ref.def_id).map_move |bound| {
+do tcx.lang_items.to_builtin_kind(trait_ref.def_id).map |bound| {
 bounds.add(bound);
 };
 true
@@ -446,7 +446,7 @@ pub fn get_impl_trait(cdata: Cmd,
 tcx: ty::ctxt) -> Option<@ty::TraitRef>
 {
 let item_doc = lookup_item(id, cdata.data);
-do reader::maybe_get_doc(item_doc, tag_item_trait_ref).map_move |tp| {
+do reader::maybe_get_doc(item_doc, tag_item_trait_ref).map |tp| {
 @doc_trait_ref(tp, tcx, cdata)
 }
 }

View file

@@ -319,13 +319,13 @@ pub fn opt_loan_path(cmt: mc::cmt) -> Option<@LoanPath> {
 }
 mc::cat_deref(cmt_base, _, pk) => {
-do opt_loan_path(cmt_base).map_move |lp| {
+do opt_loan_path(cmt_base).map |lp| {
 @LpExtend(lp, cmt.mutbl, LpDeref(pk))
 }
 }
 mc::cat_interior(cmt_base, ik) => {
-do opt_loan_path(cmt_base).map_move |lp| {
+do opt_loan_path(cmt_base).map |lp| {
 @LpExtend(lp, cmt.mutbl, LpInterior(ik))
 }
 }

View file

@@ -897,7 +897,7 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt,
 // check legality of moving out of the enum
 // x @ Foo(*) is legal, but x @ Foo(y) isn't.
-if sub.map_move_default(false, |p| pat_contains_bindings(def_map, p)) {
+if sub.map_default(false, |p| pat_contains_bindings(def_map, p)) {
 tcx.sess.span_err(
 p.span,
 "cannot bind by-move with sub-bindings");

View file

@@ -507,9 +507,9 @@ pub fn compare_lit_exprs(tcx: middle::ty::ctxt, a: &Expr, b: &Expr) -> Option<in
 }
 pub fn lit_expr_eq(tcx: middle::ty::ctxt, a: &Expr, b: &Expr) -> Option<bool> {
-compare_lit_exprs(tcx, a, b).map_move(|val| val == 0)
+compare_lit_exprs(tcx, a, b).map(|val| val == 0)
 }
 pub fn lit_eq(a: &lit, b: &lit) -> Option<bool> {
-compare_const_vals(&lit_to_const(a), &lit_to_const(b)).map_move(|val| val == 0)
+compare_const_vals(&lit_to_const(a), &lit_to_const(b)).map(|val| val == 0)
 }

View file

@@ -418,7 +418,7 @@ impl<'self> LanguageItemCollector<'self> {
 return; // Didn't match.
 }
-let item_index = self.item_refs.find_equiv(&value).map_move(|x| *x);
+let item_index = self.item_refs.find_equiv(&value).map(|x| *x);
 // prevent borrow checker from considering ^~~~~~~~~~~
 // self to be borrowed (annoying)

View file

@@ -1001,7 +1001,7 @@ fn check_stability(cx: &Context, e: &ast::Expr) {
 match cx.tcx.items.find(&id.node) {
 Some(ast_node) => {
 let s = do ast_node.with_attrs |attrs| {
-do attrs.map_move |a| {
+do attrs.map |a| {
 attr::find_stability(a.iter().map(|a| a.meta()))
 }
 };

View file

@@ -619,7 +619,7 @@ impl Liveness {
 match expr.node {
 ExprPath(_) => {
 let def = self.tcx.def_map.get_copy(&expr.id);
-do moves::moved_variable_node_id_from_def(def).map_move |rdef| {
+do moves::moved_variable_node_id_from_def(def).map |rdef| {
 self.variable(rdef, expr.span)
 }
 }
@@ -635,7 +635,7 @@ impl Liveness {
 -> Option<Variable> {
 match self.tcx.def_map.find(&node_id) {
 Some(&def) => {
-do moves::moved_variable_node_id_from_def(def).map_move |rdef| {
+do moves::moved_variable_node_id_from_def(def).map |rdef| {
 self.variable(rdef, span)
 }
 }

View file

@@ -117,7 +117,7 @@ impl RegionMaps {
 pub fn opt_encl_scope(&self, id: ast::NodeId) -> Option<ast::NodeId> {
 //! Returns the narrowest scope that encloses `id`, if any.
-self.scope_map.find(&id).map_move(|x| *x)
+self.scope_map.find(&id).map(|x| *x)
 }
 pub fn encl_scope(&self, id: ast::NodeId) -> ast::NodeId {
@@ -613,7 +613,7 @@ impl DetermineRpCtxt {
 /// the new variance is joined with the old variance.
 pub fn add_rp(&mut self, id: ast::NodeId, variance: region_variance) {
 assert!(id != 0);
-let old_variance = self.region_paramd_items.find(&id).map_move(|x| *x);
+let old_variance = self.region_paramd_items.find(&id).map(|x| *x);
 let joined_variance = match old_variance {
 None => variance,
 Some(v) => join_variance(v, variance)

View file

@@ -3463,7 +3463,7 @@ impl Resolver {
 // item, it's ok
 match def {
 DefTyParam(did, _)
-if self.def_map.find(&did.node).map_move(|x| *x)
+if self.def_map.find(&did.node).map(|x| *x)
 == Some(DefTyParamBinder(item_id)) => {
 // ok
 }
@@ -4255,7 +4255,7 @@ impl Resolver {
 }
 }
-do bounds.map |bound_vec| {
+do bounds.as_ref().map |bound_vec| {
 for bound in bound_vec.iter() {
 self.resolve_type_parameter_bound(ty.id, bound);
 }
@@ -4263,7 +4263,7 @@ impl Resolver {
 }
 ty_closure(c) => {
-do c.bounds.map |bounds| {
+do c.bounds.as_ref().map |bounds| {
 for bound in bounds.iter() {
 self.resolve_type_parameter_bound(ty.id, bound);
 }

View file

@@ -96,7 +96,7 @@ impl<T:Subst + 'static> Subst for @T {
 impl<T:Subst> Subst for Option<T> {
 fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> Option<T> {
-self.map(|t| t.subst(tcx, substs))
+self.as_ref().map(|t| t.subst(tcx, substs))
 }
 }
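
The Subst hunk above is the same idea at the trait level: an impl for Option<T> that used to rely on a by-reference map now borrows with as_ref() and maps. A small illustrative analogue (trait and names invented for the example):

```rust
// Invented example of the delegation pattern: implement a trait for Option<T>
// by borrowing the inner value with `as_ref()` and mapping over it.
trait Describe {
    fn describe(&self) -> String;
}

impl Describe for i32 {
    fn describe(&self) -> String {
        format!("int {}", self)
    }
}

impl<T: Describe> Describe for Option<T> {
    fn describe(&self) -> String {
        self.as_ref()
            .map(|t| t.describe())
            .unwrap_or_else(|| "nothing".to_string())
    }
}
```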

View file

@@ -113,7 +113,7 @@ pub struct _InsnCtxt { _x: () }
 impl Drop for _InsnCtxt {
 fn drop(&mut self) {
 do local_data::modify(task_local_insn_key) |c| {
-do c.map_move |mut ctx| {
+do c.map |mut ctx| {
 ctx.pop();
 ctx
 }
@@ -124,7 +124,7 @@ impl Drop for _InsnCtxt {
 pub fn push_ctxt(s: &'static str) -> _InsnCtxt {
 debug2!("new InsnCtxt: {}", s);
 do local_data::modify(task_local_insn_key) |c| {
-do c.map_move |mut ctx| {
+do c.map |mut ctx| {
 ctx.push(s);
 ctx
 }

View file

@@ -161,7 +161,7 @@ fn struct_ty(ty: Type,
 padding: Option<Type>,
 coerce: bool) -> Type {
 let size = ty_size(ty) * 8;
-let mut fields = padding.map_move_default(~[], |p| ~[p]);
+let mut fields = padding.map_default(~[], |p| ~[p]);
 if coerce {
 fields = vec::append(fields, coerce_to_int(size));

View file

@@ -634,7 +634,7 @@ impl get_node_info for ast::Block {
 impl get_node_info for Option<@ast::Expr> {
 fn info(&self) -> Option<NodeInfo> {
-self.and_then_ref(|s| s.info())
+self.as_ref().and_then(|s| s.info())
 }
 }
@@ -1145,7 +1145,7 @@ pub fn node_id_type_params(bcx: &mut Block, id: ast::NodeId) -> ~[ty::t] {
 pub fn node_vtables(bcx: @mut Block, id: ast::NodeId)
 -> Option<typeck::vtable_res> {
 let raw_vtables = bcx.ccx().maps.vtable_map.find(&id);
-raw_vtables.map_move(|vts| resolve_vtables_in_fn_ctxt(bcx.fcx, *vts))
+raw_vtables.map(|vts| resolve_vtables_in_fn_ctxt(bcx.fcx, *vts))
 }
 // Apply the typaram substitutions in the FunctionContext to some

View file

@@ -283,7 +283,7 @@ impl Drop for CrateContext {
 local_data_key!(task_local_llcx_key: @ContextRef)
 pub fn task_llcx() -> ContextRef {
-let opt = local_data::get(task_local_llcx_key, |k| k.map_move(|k| *k));
+let opt = local_data::get(task_local_llcx_key, |k| k.map(|k| *k));
 *opt.expect("task-local LLVMContextRef wasn't ever set!")
 }

View file

@@ -77,7 +77,7 @@ pub fn llvm_calling_convention(ccx: &mut CrateContext,
 abis: AbiSet) -> Option<CallConv> {
 let arch = ccx.sess.targ_cfg.arch;
 abis.for_arch(arch).map(|abi| {
-match *abi {
+match abi {
 RustIntrinsic => {
 // Intrinsics are emitted by monomorphic fn
 ccx.sess.bug(format!("Asked to register intrinsic fn"));

View file

@@ -176,7 +176,7 @@ pub fn trans_method_callee(bcx: @mut Block,
 data: Method(MethodData {
 llfn: callee_fn.llfn,
 llself: val,
-temp_cleanup: temp_cleanups.head_opt().map_move(|v| *v),
+temp_cleanup: temp_cleanups.head_opt().map(|v| *v),
 self_mode: mentry.self_mode,
 })
 }
@@ -356,7 +356,7 @@ pub fn trans_monomorphized_callee(bcx: @mut Block,
 data: Method(MethodData {
 llfn: llfn_val,
 llself: llself_val,
-temp_cleanup: temp_cleanups.head_opt().map_move(|v| *v),
+temp_cleanup: temp_cleanups.head_opt().map(|v| *v),
 self_mode: mentry.self_mode,
 })
 }

View file

@@ -1349,7 +1349,7 @@ pub fn fold_bare_fn_ty(fty: &BareFnTy, fldop: &fn(t) -> t) -> BareFnTy {
 fn fold_sty(sty: &sty, fldop: &fn(t) -> t) -> sty {
 fn fold_substs(substs: &substs, fldop: &fn(t) -> t) -> substs {
 substs {regions: substs.regions.clone(),
-self_ty: substs.self_ty.map(|t| fldop(*t)),
+self_ty: substs.self_ty.map(|t| fldop(t)),
 tps: substs.tps.map(|t| fldop(*t))}
 }
@@ -1449,7 +1449,7 @@ pub fn fold_regions_and_ty(
 substs {
 regions: regions,
-self_ty: substs.self_ty.map(|t| fldt(*t)),
+self_ty: substs.self_ty.map(|t| fldt(t)),
 tps: substs.tps.map(|t| fldt(*t))
 }
 }
@@ -3619,7 +3619,7 @@ pub fn def_has_ty_params(def: ast::Def) -> bool {
 }
 pub fn provided_source(cx: ctxt, id: ast::DefId) -> Option<ast::DefId> {
-cx.provided_method_sources.find(&id).map_move(|x| *x)
+cx.provided_method_sources.find(&id).map(|x| *x)
 }
 pub fn provided_trait_methods(cx: ctxt, id: ast::DefId) -> ~[@Method] {
@@ -3791,7 +3791,7 @@ fn struct_ctor_id(cx: ctxt, struct_did: ast::DefId) -> Option<ast::DefId> {
 Some(&ast_map::node_item(item, _)) => {
 match item.node {
 ast::item_struct(struct_def, _) => {
-do struct_def.ctor_id.map_move |ctor_id| {
+do struct_def.ctor_id.map |ctor_id| {
 ast_util::local_def(ctor_id)
 }
 }

View file

@@ -645,7 +645,7 @@ fn ty_of_method_or_bare_fn<AC:AstConv,RS:RegionScope + Clone + 'static>(
 in_binding_rscope(rscope,
 RegionParamNames(bound_lifetime_names.clone()));
-let opt_transformed_self_ty = do opt_self_info.map_move |self_info| {
+let opt_transformed_self_ty = do opt_self_info.map |self_info| {
 transform_self_ty(this, &rb, self_info)
 };
@@ -749,7 +749,7 @@ pub fn ty_of_closure<AC:AstConv,RS:RegionScope + Clone + 'static>(
 RegionParamNames(bound_lifetime_names.clone()));
 let input_tys = do decl.inputs.iter().enumerate().map |(i, a)| {
-let expected_arg_ty = do expected_sig.and_then_ref |e| {
+let expected_arg_ty = do expected_sig.as_ref().and_then |e| {
 // no guarantee that the correct number of expected args
 // were supplied
 if i < e.inputs.len() {Some(e.inputs[i])} else {None}

View file

@@ -165,7 +165,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: @ast::Pat, path: &ast::Path,
 // See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs
 fcx.infcx().type_error_message_str_with_expected(pat.span,
 |expected, actual| {
-expected.map_move_default(~"", |e| {
+expected.map_default(~"", |e| {
 format!("mismatched types: expected `{}` but found {}",
 e, actual)})},
 Some(expected), ~"a structure pattern",
@@ -214,7 +214,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: @ast::Pat, path: &ast::Path,
 // See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs
 fcx.infcx().type_error_message_str_with_expected(pat.span,
 |expected, actual| {
-expected.map_move_default(~"", |e| {
+expected.map_default(~"", |e| {
 format!("mismatched types: expected `{}` but found {}",
 e, actual)})},
 Some(expected), ~"an enum or structure pattern",
@@ -519,7 +519,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: @ast::Pat, expected: ty::t) {
 // See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs
 fcx.infcx().type_error_message_str_with_expected(pat.span,
 |expected, actual| {
-expected.map_move_default(~"", |e| {
+expected.map_default(~"", |e| {
 format!("mismatched types: expected `{}` but found {}",
 e, actual)})},
 Some(expected), ~"a structure pattern",
@@ -566,7 +566,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: @ast::Pat, expected: ty::t) {
 };
 // See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs
 fcx.infcx().type_error_message_str_with_expected(pat.span, |expected, actual| {
-expected.map_move_default(~"", |e| {
+expected.map_default(~"", |e| {
 format!("mismatched types: expected `{}` but found {}",
 e, actual)})}, Some(expected), ~"tuple", Some(&type_error));
 fcx.write_error(pat.id);
@@ -616,7 +616,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: @ast::Pat, expected: ty::t) {
 fcx.infcx().type_error_message_str_with_expected(
 pat.span,
 |expected, actual| {
-expected.map_move_default(~"", |e| {
+expected.map_default(~"", |e| {
 format!("mismatched types: expected `{}` but found {}",
 e, actual)})},
 Some(expected),
@@ -675,7 +675,7 @@ pub fn check_pointer_pat(pcx: &pat_ctxt,
 fcx.infcx().type_error_message_str_with_expected(
 span,
 |expected, actual| {
-expected.map_move_default(~"", |e| {
+expected.map_default(~"", |e| {
 format!("mismatched types: expected `{}` but found {}",
 e, actual)})},
 Some(expected),

View file

@@ -440,7 +440,7 @@ pub fn check_fn(ccx: @mut CrateCtxt,
 |br| ty::re_free(ty::FreeRegion {scope_id: body.id,
 bound_region: br}));
 let opt_self_info =
-opt_self_info.map_move(
+opt_self_info.map(
 |si| SelfInfo {self_ty: opt_self_ty.unwrap(), .. si});
 (isr, opt_self_info, fn_sig)
 };
@@ -540,7 +540,7 @@ pub fn check_method(ccx: @mut CrateCtxt,
 {
 let method_def_id = local_def(method.id);
 let method_ty = ty::method(ccx.tcx, method_def_id);
-let opt_self_info = method_ty.transformed_self_ty.map_move(|ty| {
+let opt_self_info = method_ty.transformed_self_ty.map(|ty| {
 SelfInfo {self_ty: ty,
 self_id: method.self_id,
 span: method.explicit_self.span}
@@ -561,7 +561,7 @@ pub fn check_no_duplicate_fields(tcx: ty::ctxt,
 for p in fields.iter() {
 let (id, sp) = *p;
-let orig_sp = field_names.find(&id).map_move(|x| *x);
+let orig_sp = field_names.find(&id).map(|x| *x);
 match orig_sp {
 Some(orig_sp) => {
 tcx.sess.span_err(sp, format!("Duplicate field name {} in record type declaration",
@@ -605,7 +605,7 @@ pub fn check_item(ccx: @mut CrateCtxt, it: @ast::item) {
 check_bare_fn(ccx, decl, body, it.id, None);
 }
 ast::item_impl(_, _, _, ref ms) => {
-let rp = ccx.tcx.region_paramd_items.find(&it.id).map_move(|x| *x);
+let rp = ccx.tcx.region_paramd_items.find(&it.id).map(|x| *x);
 debug2!("item_impl {} with id {} rp {:?}",
 ccx.tcx.sess.str_of(it.ident), it.id, rp);
 for m in ms.iter() {
@@ -2026,7 +2026,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
 for field in ast_fields.iter() {
 let mut expected_field_type = ty::mk_err();
-let pair = class_field_map.find(&field.ident.name).map_move(|x| *x);
+let pair = class_field_map.find(&field.ident.name).map(|x| *x);
 match pair {
 None => {
 tcx.sess.span_err(
@@ -2110,7 +2110,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
 if class_id.crate == ast::LOCAL_CRATE {
 region_parameterized =
 tcx.region_paramd_items.find(&class_id.node).
-map_move(|x| *x);
+map(|x| *x);
 match tcx.items.find(&class_id.node) {
 Some(&ast_map::node_item(@ast::item {
 node: ast::item_struct(_, ref generics),
@@ -2198,7 +2198,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
 let raw_type;
 if enum_id.crate == ast::LOCAL_CRATE {
 region_parameterized =
-tcx.region_paramd_items.find(&enum_id.node).map_move(|x| *x);
+tcx.region_paramd_items.find(&enum_id.node).map(|x| *x);
 match tcx.items.find(&enum_id.node) {
 Some(&ast_map::node_item(@ast::item {
 node: ast::item_enum(_, ref generics),

View file

@@ -36,7 +36,7 @@ pub fn replace_bound_regions_in_fn_sig(
 debug2!("replace_bound_regions_in_fn_sig(self_ty={:?}, fn_sig={}, \
 all_tys={:?})",
-opt_self_ty.map(|t| ppaux::ty_to_str(tcx, *t)),
+opt_self_ty.map(|t| ppaux::ty_to_str(tcx, t)),
 ppaux::fn_sig_to_str(tcx, fn_sig),
 all_tys.map(|t| ppaux::ty_to_str(tcx, *t)));
 let _i = indenter();
@@ -48,12 +48,12 @@ pub fn replace_bound_regions_in_fn_sig(
 let new_fn_sig = ty::fold_sig(fn_sig, |t| {
 replace_bound_regions(tcx, isr, t)
 });
-let new_self_ty = opt_self_ty.map(|t| replace_bound_regions(tcx, isr, *t));
+let new_self_ty = opt_self_ty.map(|t| replace_bound_regions(tcx, isr, t));
 debug2!("result of replace_bound_regions_in_fn_sig: \
 new_self_ty={:?}, \
 fn_sig={}",
-new_self_ty.map(|t| ppaux::ty_to_str(tcx, *t)),
+new_self_ty.map(|t| ppaux::ty_to_str(tcx, t)),
 ppaux::fn_sig_to_str(tcx, &new_fn_sig));
 return (isr, new_self_ty, new_fn_sig);

View file

@@ -141,7 +141,7 @@ fn lookup_vtables_for_param(vcx: &VtableContext,
 // Substitute the values of the type parameters that may
 // appear in the bound.
-let trait_ref = substs.map_default(trait_ref, |substs| {
+let trait_ref = substs.as_ref().map_default(trait_ref, |substs| {
 debug2!("about to subst: {}, {}",
 trait_ref.repr(tcx), substs.repr(tcx));
 trait_ref.subst(tcx, *substs)
@@ -330,8 +330,7 @@ fn search_for_vtable(vcx: &VtableContext,
 // XXX: this is a bad way to do this, since we do
 // pointless allocations.
-let impls = tcx.trait_impls.find(&trait_ref.def_id)
-.map_default(@mut ~[], |x| **x);
+let impls = tcx.trait_impls.find(&trait_ref.def_id).map_default(@mut ~[], |x| *x);
 // impls is the list of all impls in scope for trait_ref.
 for im in impls.iter() {
 // im is one specific impl of trait_ref.
@@ -485,7 +484,7 @@ fn fixup_substs(vcx: &VtableContext,
 ast::MutImmutable,
 ty::EmptyBuiltinBounds());
 do fixup_ty(vcx, location_info, t, is_early).map |t_f| {
-match ty::get(*t_f).sty {
+match ty::get(t_f).sty {
 ty::ty_trait(_, ref substs_f, _, _, _) => (*substs_f).clone(),
 _ => fail2!("t_f should be a trait")
 }

View file

@@ -209,7 +209,7 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt,
 trait_id: ast::NodeId)
 {
 let tcx = ccx.tcx;
-let region_paramd = tcx.region_paramd_items.find(&trait_id).map_move(|x| *x);
+let region_paramd = tcx.region_paramd_items.find(&trait_id).map(|x| *x);
 match tcx.items.get_copy(&trait_id) {
 ast_map::node_item(@ast::item {
 node: ast::item_trait(ref generics, _, ref ms),
@@ -843,7 +843,7 @@ pub fn ensure_no_ty_param_bounds(ccx: &CrateCtxt,
 pub fn convert(ccx: &CrateCtxt, it: &ast::item) {
 let tcx = ccx.tcx;
-let rp = tcx.region_paramd_items.find(&it.id).map_move(|x| *x);
+let rp = tcx.region_paramd_items.find(&it.id).map(|x| *x);
 debug2!("convert: item {} with id {} rp {:?}",
 tcx.sess.str_of(it.ident), it.id, rp);
 match it.node {
@@ -1064,7 +1064,7 @@ pub fn trait_def_of_item(ccx: &CrateCtxt, it: &ast::item) -> @ty::TraitDef {
 Some(&def) => return def,
 _ => {}
 }
-let rp = tcx.region_paramd_items.find(&it.id).map_move(|x| *x);
+let rp = tcx.region_paramd_items.find(&it.id).map(|x| *x);
 match it.node {
 ast::item_trait(ref generics, ref supertraits, _) => {
 let self_ty = ty::mk_self(tcx, def_id);
@@ -1096,7 +1096,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::item)
 Some(&tpt) => return tpt,
 _ => {}
 }
-let rp = tcx.region_paramd_items.find(&it.id).map_move(|x| *x);
+let rp = tcx.region_paramd_items.find(&it.id).map(|x| *x);
 match it.node {
 ast::item_static(ref t, _, _) => {
 let typ = ccx.to_ty(&EmptyRscope, t);
@@ -1133,7 +1133,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::item)
 None => { }
 }
-let rp = tcx.region_paramd_items.find(&it.id).map_move(|x| *x);
+let rp = tcx.region_paramd_items.find(&it.id).map(|x| *x);
 let region_parameterization =
 RegionParameterization::from_variance_and_generics(rp, generics);
 let tpt = {

View file

@@ -727,13 +727,13 @@ impl InferCtxt {
 err: Option<&ty::type_err>) {
 debug2!("hi! expected_ty = {:?}, actual_ty = {}", expected_ty, actual_ty);
-let error_str = do err.map_move_default(~"") |t_err| {
+let error_str = do err.map_default(~"") |t_err| {
 format!(" ({})", ty::type_err_to_str(self.tcx, t_err))
 };
-let resolved_expected = do expected_ty.map_move |e_ty| {
+let resolved_expected = do expected_ty.map |e_ty| {
 self.resolve_type_vars_if_possible(e_ty)
 };
-if !resolved_expected.map_move_default(false, |e| { ty::type_is_error(e) }) {
+if !resolved_expected.map_default(false, |e| { ty::type_is_error(e) }) {
 match resolved_expected {
 None => self.tcx.sess.span_err(sp,
 format!("{}{}", mk_msg(None, actual_ty), error_str)),

View file

@@ -261,10 +261,10 @@ pub fn run_compiler(args: &[~str], demitter: @diagnostic::Emitter) {
 let sopts = build_session_options(binary, matches, demitter);
 let sess = build_session(sopts, demitter);
-let odir = matches.opt_str("out-dir").map_move(|o| Path(o));
-let ofile = matches.opt_str("o").map_move(|o| Path(o));
+let odir = matches.opt_str("out-dir").map(|o| Path(o));
+let ofile = matches.opt_str("o").map(|o| Path(o));
 let cfg = build_configuration(sess);
-let pretty = do matches.opt_default("pretty", "normal").map_move |a| {
+let pretty = do matches.opt_default("pretty", "normal").map |a| {
 parse_pretty(sess, a)
 };
 match pretty {

View file

@@ -1008,7 +1008,7 @@ impl Clean<ViewItemInner> for ast::view_item_ {
 fn clean(&self) -> ViewItemInner {
 match self {
 &ast::view_item_extern_mod(ref i, ref p, ref mi, ref id) =>
-ExternMod(i.clean(), p.map(|&(ref x, _)| x.to_owned()), mi.clean(), *id),
+ExternMod(i.clean(), p.map(|(ref x, _)| x.to_owned()), mi.clean(), *id),
 &ast::view_item_use(ref vp) => Import(vp.clean())
 }
 }
@@ -1208,5 +1208,5 @@ fn resolve_use_source(path: Path, id: ast::NodeId) -> ImportSource {
 fn resolve_def(id: ast::NodeId) -> Option<ast::DefId> {
 let dm = local_data::get(super::ctxtkey, |x| *x.unwrap()).tycx.def_map;
-dm.find(&id).map_move(|&d| ast_util::def_id_of_def(d))
+dm.find(&id).map(|&d| ast_util::def_id_of_def(d))
 }

View file

@@ -134,7 +134,7 @@ pub fn main_args(args: &[~str]) -> int {
 info2!("going to format");
 let started = time::precise_time_ns();
-let output = matches.opt_str("o").map(|s| Path(*s));
+let output = matches.opt_str("o").map(|s| Path(s));
 match matches.opt_str("w") {
 Some(~"html") | None => {
 html::render::run(crate, output.unwrap_or(Path("doc")))

View file

@@ -220,7 +220,7 @@ fn run(mut program: ~Program, binary: ~str, lib_search_paths: ~[~str],
 }
 }
 }
-result = do blk.expr.map_move |e| {
+result = do blk.expr.map |e| {
 do with_pp(intr) |pp, _| { pprust::print_expr(pp, e); }
 };
 }

View file

@@ -230,7 +230,7 @@ impl PkgSrc {
 /// True if the given path's stem is self's pkg ID's stem
 fn stem_matches(&self, p: &Path) -> bool {
-p.filestem().map_default(false, |p| { p == &self.id.short_name.as_slice() })
+p.filestem().map_default(false, |p| { p == self.id.short_name.as_slice() })
 }
 pub fn push_crate(cs: &mut ~[Crate], prefix: uint, p: &Path) {

View file

@@ -272,7 +272,7 @@ fn library_in(short_name: &str, version: &Version, dir_to_search: &Path) -> Opti
 // Return the filename that matches, which we now know exists
 // (if result_filename != None)
 let abs_path = do result_filename.map |result_filename| {
-let absolute_path = dir_to_search.push_rel(result_filename);
+let absolute_path = dir_to_search.push_rel(&result_filename);
 debug2!("result_filename = {}", absolute_path.to_str());
 absolute_path
 };

View file

@@ -558,7 +558,8 @@ impl CtxMethods for BuildContext {
 let maybe_executable = built_executable_in_workspace(id, source_workspace);
 let maybe_library = built_library_in_workspace(id, source_workspace);
 let target_exec = target_executable_in_workspace(id, target_workspace);
-let target_lib = maybe_library.map(|_p| target_library_in_workspace(id, target_workspace));
+let target_lib = maybe_library.as_ref()
+.map(|_| target_library_in_workspace(id, target_workspace));
 debug2!("target_exec = {} target_lib = {:?} \
 maybe_executable = {:?} maybe_library = {:?}",

View file

@@ -359,7 +359,7 @@ fn test_executable_exists(repo: &Path, short_name: &str) -> bool {
 debug2!("test_executable_exists: repo = {}, short_name = {}", repo.to_str(), short_name);
 let exec = built_test_in_workspace(&PkgId::new(short_name), repo);
 do exec.map_default(false) |exec| {
-os::path_exists(exec) && is_rwx(exec)
+os::path_exists(&exec) && is_rwx(&exec)
 }
 }
@@ -538,8 +538,8 @@ fn test_install_valid() {
 let lib = installed_library_in_workspace(&temp_pkg_id.path, &temp_workspace);
 debug2!("lib = {:?}", lib);
-assert!(lib.map_default(false, |l| os::path_exists(l)));
-assert!(lib.map_default(false, |l| is_rwx(l)));
+assert!(lib.as_ref().map_default(false, |l| os::path_exists(l)));
+assert!(lib.as_ref().map_default(false, |l| is_rwx(l)));
 // And that the test and bench executables aren't installed
 assert!(!os::path_exists(&target_test_in_workspace(&temp_pkg_id, &temp_workspace)));
@@ -827,8 +827,8 @@ fn rustpkg_clean_no_arg() {
 command_line_test([~"build"], &package_dir);
 assert_built_executable_exists(&tmp, "foo");
 command_line_test([~"clean"], &package_dir);
-assert!(!built_executable_in_workspace(&PkgId::new("foo"),
-&tmp).map_default(false, |m| { os::path_exists(m) }));
+let res = built_executable_in_workspace(&PkgId::new("foo"), &tmp);
+assert!(!res.as_ref().map_default(false, |m| { os::path_exists(m) }));
 }
 #[test]

View file

@@ -554,5 +554,5 @@ pub fn datestamp(p: &Path) -> Option<libc::time_t> {
 debug2!("Scrutinizing datestamp for {} - does it exist? {:?}", p.to_str(), os::path_exists(p));
 let out = p.stat().map(|stat| stat.st_mtime);
 debug2!("Date = {:?}", out);
-out.map(|t| { *t as libc::time_t })
+out.map(|t| { t as libc::time_t })
 }

View file

@@ -106,7 +106,7 @@ impl<T, U> Condition<T, U> {
 /// ```
 pub fn trap<'a>(&'a self, h: &'a fn(T) -> U) -> Trap<'a, T, U> {
 let h: Closure = unsafe { ::cast::transmute(h) };
-let prev = local_data::get(self.key, |k| k.map(|&x| *x));
+let prev = local_data::get(self.key, |k| k.map(|x| *x));
 let h = @Handler { handle: h, prev: prev };
 Trap { cond: self, handler: h }
 }

View file

@ -239,7 +239,7 @@ impl<K:Hash + Eq,V> HashMap<K, V> {
let len_buckets = self.buckets.len(); let len_buckets = self.buckets.len();
let bucket = self.buckets[idx].take(); let bucket = self.buckets[idx].take();
let value = do bucket.map_move |bucket| { let value = do bucket.map |bucket| {
bucket.value bucket.value
}; };
@ -480,7 +480,7 @@ impl<K: Hash + Eq, V> HashMap<K, V> {
impl<K: Hash + Eq, V: Clone> HashMap<K, V> { impl<K: Hash + Eq, V: Clone> HashMap<K, V> {
/// Like `find`, but returns a copy of the value. /// Like `find`, but returns a copy of the value.
pub fn find_copy(&self, k: &K) -> Option<V> { pub fn find_copy(&self, k: &K) -> Option<V> {
self.find(k).map_move(|v| (*v).clone()) self.find(k).map(|v| (*v).clone())
} }
/// Like `get`, but returns a copy of the value. /// Like `get`, but returns a copy of the value.
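
`find` already yields an `Option<&V>`, so `find_copy` above becomes the by-value `map` over that borrowed option, cloning through the reference. A tiny sketch of the same shape on a standalone option of a borrow, in the Rust of this commit (not the real `HashMap`):

```rust
fn main() {
    let value = ~"cached";
    let found: Option<&~str> = Some(&value);    // what a lookup hands back

    // `map` consumes the Option<&~str>; the clone copies the payload out.
    let copy: Option<~str> = found.map(|v| (*v).clone());
    assert!(copy == Some(~"cached"));
}
```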

View file

@ -617,7 +617,7 @@ pub trait Iterator<A> {
Some((y, y_val)) Some((y, y_val))
} }
} }
}).map_move(|(x, _)| x) }).map(|(x, _)| x)
} }
/// Return the element that gives the minimum value from the /// Return the element that gives the minimum value from the
@ -641,7 +641,7 @@ pub trait Iterator<A> {
Some((y, y_val)) Some((y, y_val))
} }
} }
}).map_move(|(x, _)| x) }).map(|(x, _)| x)
} }
} }
@ -1550,8 +1550,8 @@ impl<'self, A, T: Iterator<A>, B, U: Iterator<B>> Iterator<B> for FlatMap<'self,
return Some(x) return Some(x)
} }
} }
match self.iter.next().map_move(|x| (self.f)(x)) { match self.iter.next().map(|x| (self.f)(x)) {
None => return self.backiter.and_then_mut_ref(|it| it.next()), None => return self.backiter.as_mut().and_then(|it| it.next()),
next => self.frontiter = next, next => self.frontiter = next,
} }
} }
@ -1559,8 +1559,8 @@ impl<'self, A, T: Iterator<A>, B, U: Iterator<B>> Iterator<B> for FlatMap<'self,
#[inline] #[inline]
fn size_hint(&self) -> (uint, Option<uint>) { fn size_hint(&self) -> (uint, Option<uint>) {
let (flo, fhi) = self.frontiter.map_default((0, Some(0)), |it| it.size_hint()); let (flo, fhi) = self.frontiter.as_ref().map_default((0, Some(0)), |it| it.size_hint());
let (blo, bhi) = self.backiter.map_default((0, Some(0)), |it| it.size_hint()); let (blo, bhi) = self.backiter.as_ref().map_default((0, Some(0)), |it| it.size_hint());
let lo = flo.saturating_add(blo); let lo = flo.saturating_add(blo);
match (self.iter.size_hint(), fhi, bhi) { match (self.iter.size_hint(), fhi, bhi) {
((0, Some(0)), Some(a), Some(b)) => (lo, a.checked_add(&b)), ((0, Some(0)), Some(a), Some(b)) => (lo, a.checked_add(&b)),
@ -1582,8 +1582,8 @@ impl<'self,
y => return y y => return y
} }
} }
match self.iter.next_back().map_move(|x| (self.f)(x)) { match self.iter.next_back().map(|x| (self.f)(x)) {
None => return self.frontiter.and_then_mut_ref(|it| it.next_back()), None => return self.frontiter.as_mut().and_then(|it| it.next_back()),
next => self.backiter = next, next => self.backiter = next,
} }
} }
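
The removed `and_then_mut_ref` is recovered here by composing `as_mut` with the ordinary by-value `and_then`: `as_mut` turns a borrow of `Option<T>` into an `Option<&mut T>`, and the closure can then advance the inner iterator in place, which is what `FlatMap` does above. A small standalone sketch, in the Rust of this commit (the helper name is illustrative):

```rust
// Advance an optional inner iterator in place, FlatMap-style.
fn next_inner<A, T: Iterator<A>>(front: &mut Option<T>) -> Option<A> {
    front.as_mut().and_then(|it| it.next())
}

fn main() {
    let mut front = Some((~[1, 2, 3]).move_iter());
    assert!(next_inner(&mut front) == Some(1));
    assert!(next_inner(&mut front) == Some(2));
}
```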

View file

@ -355,16 +355,16 @@ mod tests {
set(my_key, @~"parent data"); set(my_key, @~"parent data");
do task::spawn { do task::spawn {
// TLS shouldn't carry over. // TLS shouldn't carry over.
assert!(get(my_key, |k| k.map_move(|k| *k)).is_none()); assert!(get(my_key, |k| k.map(|k| *k)).is_none());
set(my_key, @~"child data"); set(my_key, @~"child data");
assert!(*(get(my_key, |k| k.map_move(|k| *k)).unwrap()) == assert!(*(get(my_key, |k| k.map(|k| *k)).unwrap()) ==
~"child data"); ~"child data");
// should be cleaned up for us // should be cleaned up for us
} }
// Must work multiple times // Must work multiple times
assert!(*(get(my_key, |k| k.map_move(|k| *k)).unwrap()) == ~"parent data"); assert!(*(get(my_key, |k| k.map(|k| *k)).unwrap()) == ~"parent data");
assert!(*(get(my_key, |k| k.map_move(|k| *k)).unwrap()) == ~"parent data"); assert!(*(get(my_key, |k| k.map(|k| *k)).unwrap()) == ~"parent data");
assert!(*(get(my_key, |k| k.map_move(|k| *k)).unwrap()) == ~"parent data"); assert!(*(get(my_key, |k| k.map(|k| *k)).unwrap()) == ~"parent data");
} }
#[test] #[test]
@ -372,7 +372,7 @@ mod tests {
static my_key: Key<@~str> = &Key; static my_key: Key<@~str> = &Key;
set(my_key, @~"first data"); set(my_key, @~"first data");
set(my_key, @~"next data"); // Shouldn't leak. set(my_key, @~"next data"); // Shouldn't leak.
assert!(*(get(my_key, |k| k.map_move(|k| *k)).unwrap()) == ~"next data"); assert!(*(get(my_key, |k| k.map(|k| *k)).unwrap()) == ~"next data");
} }
#[test] #[test]

View file

@ -96,6 +96,30 @@ impl<T: ToStr> ToStr for Option<T> {
} }
impl<T> Option<T> { impl<T> Option<T> {
/// Convert from `Option<T>` to `Option<&T>`
#[inline]
pub fn as_ref<'r>(&'r self) -> Option<&'r T> {
match *self { Some(ref x) => Some(x), None => None }
}
/// Convert from `Option<T>` to `Option<&mut T>`
#[inline]
pub fn as_mut<'r>(&'r mut self) -> Option<&'r mut T> {
match *self { Some(ref mut x) => Some(x), None => None }
}
/// Maps an `Option<T>` to `Option<U>` by applying a function to a contained value.
#[inline]
pub fn map<U>(self, f: &fn(T) -> U) -> Option<U> {
match self { Some(x) => Some(f(x)), None => None }
}
/// Applies a function to the contained value or returns a default.
#[inline]
pub fn map_default<U>(self, def: U, f: &fn(T) -> U) -> U {
match self { None => def, Some(t) => f(t) }
}
/// Return an iterator over the possibly contained value /// Return an iterator over the possibly contained value
#[inline] #[inline]
pub fn iter<'r>(&'r self) -> OptionIterator<&'r T> { pub fn iter<'r>(&'r self) -> OptionIterator<&'r T> {
@ -149,26 +173,6 @@ impl<T> Option<T> {
} }
} }
/// Returns `None` if the option is `None`, otherwise calls `f` with a
/// reference to the wrapped value and returns the result.
#[inline]
pub fn and_then_ref<'a, U>(&'a self, f: &fn(&'a T) -> Option<U>) -> Option<U> {
match *self {
Some(ref x) => f(x),
None => None
}
}
/// Returns `None` if the option is `None`, otherwise calls `f` with a
/// mutable reference to the wrapped value and returns the result.
#[inline]
pub fn and_then_mut_ref<'a, U>(&'a mut self, f: &fn(&'a mut T) -> Option<U>) -> Option<U> {
match *self {
Some(ref mut x) => f(x),
None => None
}
}
/// Returns the option if it contains a value, otherwise returns `optb`. /// Returns the option if it contains a value, otherwise returns `optb`.
#[inline] #[inline]
pub fn or(self, optb: Option<T>) -> Option<T> { pub fn or(self, optb: Option<T>) -> Option<T> {
@ -197,45 +201,6 @@ impl<T> Option<T> {
} }
} }
/// Maps a `Some` value from one type to another by reference
#[inline]
pub fn map<'a, U>(&'a self, f: &fn(&'a T) -> U) -> Option<U> {
match *self { Some(ref x) => Some(f(x)), None => None }
}
/// Maps a `Some` value from one type to another by a mutable reference
#[inline]
pub fn map_mut<'a, U>(&'a mut self, f: &fn(&'a mut T) -> U) -> Option<U> {
match *self { Some(ref mut x) => Some(f(x)), None => None }
}
/// Applies a function to the contained value or returns a default
#[inline]
pub fn map_default<'a, U>(&'a self, def: U, f: &fn(&'a T) -> U) -> U {
match *self { None => def, Some(ref t) => f(t) }
}
/// Maps a `Some` value from one type to another by a mutable reference,
/// or returns a default value.
#[inline]
pub fn map_mut_default<'a, U>(&'a mut self, def: U, f: &fn(&'a mut T) -> U) -> U {
match *self { Some(ref mut x) => f(x), None => def }
}
/// As `map`, but consumes the option and gives `f` ownership to avoid
/// copying.
#[inline]
pub fn map_move<U>(self, f: &fn(T) -> U) -> Option<U> {
match self { Some(x) => Some(f(x)), None => None }
}
/// As `map_default`, but consumes the option and gives `f`
/// ownership to avoid copying.
#[inline]
pub fn map_move_default<U>(self, def: U, f: &fn(T) -> U) -> U {
match self { None => def, Some(t) => f(t) }
}
/// Take the value out of the option, leaving a `None` in its place. /// Take the value out of the option, leaving a `None` in its place.
#[inline] #[inline]
pub fn take(&mut self) -> Option<T> { pub fn take(&mut self) -> Option<T> {
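
Taken together, the new surface is a single by-value `map`/`map_default` plus the borrowing adapters: `x.as_ref().map(f)` covers the old by-reference `map`, `x.as_mut().map(f)` covers `map_mut`, and plain `map` takes over from `map_move`. A minimal sketch of the composed calls, in the Rust of this commit (the string is illustrative):

```rust
fn main() {
    let mut name: Option<~str> = Some(~"composition");

    // old: name.map(|s| s.len())            -- the closure saw &~str
    let len = name.as_ref().map(|s| s.len());
    assert!(len == Some(11));

    // old: name.map_mut(|s| s.push_char('!'))
    name.as_mut().map(|s| s.push_char('!'));

    // old: name.map_move(|s| s + "?")       -- plain map now consumes
    let question = name.map(|s| s + "?");
    assert!(question == Some(~"composition!?"));
}
```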

View file

@ -514,7 +514,7 @@ pub fn self_exe_path() -> Option<Path> {
} }
} }
load_self().map_move(|path| Path(path).dir_path()) load_self().map(|path| Path(path).dir_path())
} }

View file

@ -689,7 +689,7 @@ local_data_key!(TASK_RNG_KEY: @mut TaskRng)
/// the same sequence always. If absolute consistency is required, /// the same sequence always. If absolute consistency is required,
/// explicitly select an RNG, e.g. `IsaacRng` or `Isaac64Rng`. /// explicitly select an RNG, e.g. `IsaacRng` or `Isaac64Rng`.
pub fn task_rng() -> @mut TaskRng { pub fn task_rng() -> @mut TaskRng {
let r = local_data::get(TASK_RNG_KEY, |k| k.map(|&k| *k)); let r = local_data::get(TASK_RNG_KEY, |k| k.map(|k| *k));
match r { match r {
None => { None => {
let rng = @mut reseeding::ReseedingRng::new(StdRng::new(), let rng = @mut reseeding::ReseedingRng::new(StdRng::new(),

View file

@ -74,7 +74,7 @@ mod imp {
with_lock(|| unsafe { with_lock(|| unsafe {
let ptr = get_global_ptr(); let ptr = get_global_ptr();
let val = util::replace(&mut *ptr, None); let val = util::replace(&mut *ptr, None);
val.map(|s: &~~[~str]| (**s).clone()) val.as_ref().map(|s: &~~[~str]| (**s).clone())
}) })
} }
@ -89,7 +89,7 @@ mod imp {
pub fn clone() -> Option<~[~str]> { pub fn clone() -> Option<~[~str]> {
with_lock(|| unsafe { with_lock(|| unsafe {
let ptr = get_global_ptr(); let ptr = get_global_ptr();
(*ptr).map(|s: &~~[~str]| (**s).clone()) (*ptr).as_ref().map(|s: &~~[~str]| (**s).clone())
}) })
} }

View file

@ -165,7 +165,7 @@ impl<T> ChanOne<T> {
// Port is blocked. Wake it up. // Port is blocked. Wake it up.
let recvr = BlockedTask::cast_from_uint(task_as_state); let recvr = BlockedTask::cast_from_uint(task_as_state);
if do_resched { if do_resched {
do recvr.wake().map_move |woken_task| { do recvr.wake().map |woken_task| {
Scheduler::run_task(woken_task); Scheduler::run_task(woken_task);
}; };
} else { } else {
@ -391,7 +391,7 @@ impl<T> Drop for ChanOne<T> {
// The port is blocked waiting for a message we will never send. Wake it. // The port is blocked waiting for a message we will never send. Wake it.
rtassert!((*this.packet()).payload.is_none()); rtassert!((*this.packet()).payload.is_none());
let recvr = BlockedTask::cast_from_uint(task_as_state); let recvr = BlockedTask::cast_from_uint(task_as_state);
do recvr.wake().map_move |woken_task| { do recvr.wake().map |woken_task| {
Scheduler::run_task(woken_task); Scheduler::run_task(woken_task);
}; };
} }
@ -501,7 +501,7 @@ impl<T> GenericPort<T> for Port<T> {
} }
fn try_recv(&self) -> Option<T> { fn try_recv(&self) -> Option<T> {
do self.next.take_opt().map_move_default(None) |pone| { do self.next.take_opt().map_default(None) |pone| {
match pone.try_recv() { match pone.try_recv() {
Some(StreamPayload { val, next }) => { Some(StreamPayload { val, next }) => {
self.next.put_back(next); self.next.put_back(next);

View file

@ -203,7 +203,7 @@ impl<'self> Parser<'self> {
return None; return None;
} }
let octet = self.read_number(10, 3, 0x100).map(|&n| n as u8); let octet = self.read_number(10, 3, 0x100).map(|n| n as u8);
match octet { match octet {
Some(d) => bs[i] = d, Some(d) => bs[i] = d,
None => return None, None => return None,
@ -252,7 +252,7 @@ impl<'self> Parser<'self> {
let group = do p.read_atomically |p| { let group = do p.read_atomically |p| {
if i == 0 || p.read_given_char(':').is_some() { if i == 0 || p.read_given_char(':').is_some() {
p.read_number(16, 4, 0x10000).map(|&n| n as u16) p.read_number(16, 4, 0x10000).map(|n| n as u16)
} else { } else {
None None
} }
@ -310,16 +310,16 @@ impl<'self> Parser<'self> {
let ip_addr = |p: &mut Parser| p.read_ipv6_addr(); let ip_addr = |p: &mut Parser| p.read_ipv6_addr();
let clos_br = |p: &mut Parser| p.read_given_char(']'); let clos_br = |p: &mut Parser| p.read_given_char(']');
p.read_seq_3::<char, IpAddr, char>(open_br, ip_addr, clos_br) p.read_seq_3::<char, IpAddr, char>(open_br, ip_addr, clos_br)
.map(|&t| match t { (_, ip, _) => ip }) .map(|t| match t { (_, ip, _) => ip })
}; };
p.read_or([ipv4_p, ipv6_p]) p.read_or([ipv4_p, ipv6_p])
}; };
let colon = |p: &mut Parser| p.read_given_char(':'); let colon = |p: &mut Parser| p.read_given_char(':');
let port = |p: &mut Parser| p.read_number(10, 5, 0x10000).map(|&n| n as u16); let port = |p: &mut Parser| p.read_number(10, 5, 0x10000).map(|n| n as u16);
// host, colon, port // host, colon, port
self.read_seq_3::<IpAddr, char, u16>(ip_addr, colon, port) self.read_seq_3::<IpAddr, char, u16>(ip_addr, colon, port)
.map(|&t| match t { (ip, _, port) => SocketAddr { ip: ip, port: port } }) .map(|t| match t { (ip, _, port) => SocketAddr { ip: ip, port: port } })
} }
} }
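
The closures above also change shape: with a by-value `map` the closure receives the contained number itself rather than a reference, so the `|&n|` destructuring disappears. The same change on a plain option, as a one-off sketch in the Rust of this commit:

```rust
fn main() {
    let digit: Option<uint> = Some(7);

    // old: digit.map(|&n| n as u8) -- the closure used to see &uint
    let byte = digit.map(|n| n as u8);
    assert!(byte == Some(7u8));
}
```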

View file

@ -93,7 +93,7 @@ impl Process {
Ok((p, io)) => Some(Process{ Ok((p, io)) => Some(Process{
handle: p, handle: p,
io: io.move_iter().map(|p| io: io.move_iter().map(|p|
p.map_move(|p| io::PipeStream::bind(p)) p.map(|p| io::PipeStream::bind(p))
).collect() ).collect()
}), }),
Err(ioerr) => { Err(ioerr) => {

View file

@ -58,7 +58,7 @@ mod test {
fn test_io_timer_sleep_simple() { fn test_io_timer_sleep_simple() {
do run_in_mt_newsched_task { do run_in_mt_newsched_task {
let timer = Timer::new(); let timer = Timer::new();
do timer.map_move |mut t| { t.sleep(1) }; do timer.map |mut t| { t.sleep(1) };
} }
} }

View file

@ -486,10 +486,10 @@ impl KillHandle {
|| { || {
// Prefer to check tombstones that were there first, // Prefer to check tombstones that were there first,
// being "more fair" at the expense of tail-recursion. // being "more fair" at the expense of tail-recursion.
others.take().map_move_default(true, |f| f()) && { others.take().map_default(true, |f| f()) && {
let mut inner = this.take().unwrap(); let mut inner = this.take().unwrap();
(!inner.any_child_failed) && (!inner.any_child_failed) &&
inner.child_tombstones.take().map_move_default(true, |f| f()) inner.child_tombstones.take().map_default(true, |f| f())
} }
} }
} }
@ -508,7 +508,7 @@ impl KillHandle {
let others = Cell::new(other_tombstones); // :( let others = Cell::new(other_tombstones); // :(
|| { || {
// Prefer fairness to tail-recursion, as in above case. // Prefer fairness to tail-recursion, as in above case.
others.take().map_move_default(true, |f| f()) && others.take().map_default(true, |f| f()) &&
f.take()() f.take()()
} }
} }
@ -577,7 +577,7 @@ impl Death {
{ use util; util::ignore(group); } { use util; util::ignore(group); }
// Step 1. Decide if we need to collect child failures synchronously. // Step 1. Decide if we need to collect child failures synchronously.
do self.on_exit.take().map_move |on_exit| { do self.on_exit.take().map |on_exit| {
if success { if success {
// We succeeded, but our children might not. Need to wait for them. // We succeeded, but our children might not. Need to wait for them.
let mut inner = self.kill_handle.take_unwrap().unwrap(); let mut inner = self.kill_handle.take_unwrap().unwrap();
@ -585,7 +585,7 @@ impl Death {
success = false; success = false;
} else { } else {
// Lockless access to tombstones protected by unwrap barrier. // Lockless access to tombstones protected by unwrap barrier.
success = inner.child_tombstones.take().map_move_default(true, |f| f()); success = inner.child_tombstones.take().map_default(true, |f| f());
} }
} }
on_exit(success); on_exit(success);
@ -594,12 +594,12 @@ impl Death {
// Step 2. Possibly alert possibly-watching parent to failure status. // Step 2. Possibly alert possibly-watching parent to failure status.
// Note that as soon as parent_handle goes out of scope, the parent // Note that as soon as parent_handle goes out of scope, the parent
// can successfully unwrap its handle and collect our reported status. // can successfully unwrap its handle and collect our reported status.
do self.watching_parent.take().map_move |mut parent_handle| { do self.watching_parent.take().map |mut parent_handle| {
if success { if success {
// Our handle might be None if we had an exit callback, and // Our handle might be None if we had an exit callback, and
// already unwrapped it. But 'success' being true means no // already unwrapped it. But 'success' being true means no
// child failed, so there's nothing to do (see below case). // child failed, so there's nothing to do (see below case).
do self.kill_handle.take().map_move |own_handle| { do self.kill_handle.take().map |own_handle| {
own_handle.reparent_children_to(&mut parent_handle); own_handle.reparent_children_to(&mut parent_handle);
}; };
} else { } else {

View file

@ -538,7 +538,7 @@ impl Scheduler {
/// As enqueue_task, but with the possibility for the blocked task to /// As enqueue_task, but with the possibility for the blocked task to
/// already have been killed. /// already have been killed.
pub fn enqueue_blocked_task(&mut self, blocked_task: BlockedTask) { pub fn enqueue_blocked_task(&mut self, blocked_task: BlockedTask) {
do blocked_task.wake().map_move |task| { do blocked_task.wake().map |task| {
self.enqueue_task(task); self.enqueue_task(task);
}; };
} }

View file

@ -485,10 +485,10 @@ mod test {
do run_in_newsched_task() { do run_in_newsched_task() {
local_data_key!(key: @~str) local_data_key!(key: @~str)
local_data::set(key, @~"data"); local_data::set(key, @~"data");
assert!(*local_data::get(key, |k| k.map_move(|k| *k)).unwrap() == ~"data"); assert!(*local_data::get(key, |k| k.map(|k| *k)).unwrap() == ~"data");
local_data_key!(key2: @~str) local_data_key!(key2: @~str)
local_data::set(key2, @~"data"); local_data::set(key2, @~"data");
assert!(*local_data::get(key2, |k| k.map_move(|k| *k)).unwrap() == ~"data"); assert!(*local_data::get(key2, |k| k.map(|k| *k)).unwrap() == ~"data");
} }
} }

View file

@ -46,7 +46,7 @@ impl Process {
exit_cb: uv::ExitCallback) exit_cb: uv::ExitCallback)
-> Result<~[Option<UvPipeStream>], uv::UvError> -> Result<~[Option<UvPipeStream>], uv::UvError>
{ {
let cwd = config.cwd.map_move(|s| s.to_c_str()); let cwd = config.cwd.map(|s| s.to_c_str());
extern fn on_exit(p: *uvll::uv_process_t, extern fn on_exit(p: *uvll::uv_process_t,
exit_status: libc::c_int, exit_status: libc::c_int,

View file

@ -74,7 +74,7 @@ trait HomingIO {
* *
* RESOLUTION IDEA: Since the task is dead, we should just abort the IO action. * RESOLUTION IDEA: Since the task is dead, we should just abort the IO action.
*/ */
do task.wake().map_move |mut task| { do task.wake().map |mut task| {
*ptr = Some(task.take_unwrap_home()); *ptr = Some(task.take_unwrap_home());
self.home().send(PinnedTask(task)); self.home().send(PinnedTask(task));
}; };
@ -97,7 +97,7 @@ trait HomingIO {
* *
* RESOLUTION IDEA: Since the task is dead, we should just abort the IO action. * RESOLUTION IDEA: Since the task is dead, we should just abort the IO action.
*/ */
do task.wake().map_move |mut task| { do task.wake().map |mut task| {
task.give_home(old.take()); task.give_home(old.take());
scheduler.make_handle().send(TaskFromFriend(task)); scheduler.make_handle().send(TaskFromFriend(task));
}; };
@ -1672,7 +1672,7 @@ fn test_simple_homed_udp_io_bind_then_move_task_then_home_and_close() {
let scheduler: ~Scheduler = Local::take(); let scheduler: ~Scheduler = Local::take();
do scheduler.deschedule_running_task_and_then |_, task| { do scheduler.deschedule_running_task_and_then |_, task| {
// unblock task // unblock task
do task.wake().map_move |task| { do task.wake().map |task| {
// send self to sched2 // send self to sched2
tasksFriendHandle.take().send(TaskFromFriend(task)); tasksFriendHandle.take().send(TaskFromFriend(task));
}; };

View file

@ -415,7 +415,7 @@ impl<'self> Iterator<(uint, char)> for CharOffsetIterator<'self> {
b as uint - a as uint b as uint - a as uint
} }
}; };
self.iter.next().map_move(|ch| (offset, ch)) self.iter.next().map(|ch| (offset, ch))
} }
#[inline] #[inline]
@ -427,7 +427,7 @@ impl<'self> Iterator<(uint, char)> for CharOffsetIterator<'self> {
impl<'self> DoubleEndedIterator<(uint, char)> for CharOffsetIterator<'self> { impl<'self> DoubleEndedIterator<(uint, char)> for CharOffsetIterator<'self> {
#[inline] #[inline]
fn next_back(&mut self) -> Option<(uint, char)> { fn next_back(&mut self) -> Option<(uint, char)> {
self.iter.next_back().map_move(|ch| { self.iter.next_back().map(|ch| {
let offset = do self.string.as_imm_buf |a, _| { let offset = do self.string.as_imm_buf |a, _| {
do self.iter.string.as_imm_buf |b, len| { do self.iter.string.as_imm_buf |b, len| {
b as uint - a as uint + len b as uint - a as uint + len
@ -2260,7 +2260,7 @@ impl<'self> StrSlice<'self> for &'self str {
} else { } else {
self.matches_index_iter(needle) self.matches_index_iter(needle)
.next() .next()
.map_move(|(start, _end)| start) .map(|(start, _end)| start)
} }
} }

View file

@ -142,7 +142,7 @@ pub fn begin_unwind_(msg: *c_char, file: *c_char, line: size_t) -> ! {
// Be careful not to allocate in this block, if we're failing we may // Be careful not to allocate in this block, if we're failing we may
// have been failing due to a lack of memory in the first place... // have been failing due to a lack of memory in the first place...
do Local::borrow |task: &mut Task| { do Local::borrow |task: &mut Task| {
let n = task.name.map(|n| n.as_slice()).unwrap_or("<unnamed>"); let n = task.name.as_ref().map(|n| n.as_slice()).unwrap_or("<unnamed>");
format_args!(|args| { task.logger.log(args) }, format_args!(|args| { task.logger.log(args) },
"task '{}' failed at '{}', {}:{}", "task '{}' failed at '{}', {}:{}",
n, msg.as_slice(), file.as_slice(), line); n, msg.as_slice(), file.as_slice(), line);

View file

@ -182,7 +182,7 @@ fn check_generation(_younger: uint, _older: uint) { }
#[inline] #[cfg(test)] #[inline] #[cfg(test)]
fn incr_generation(ancestors: &AncestorList) -> uint { fn incr_generation(ancestors: &AncestorList) -> uint {
ancestors.map_default(0, |arc| access_ancestors(arc, |a| a.generation+1)) ancestors.as_ref().map_default(0, |arc| access_ancestors(arc, |a| a.generation+1))
} }
#[inline] #[cfg(not(test))] #[inline] #[cfg(not(test))]
fn incr_generation(_ancestors: &AncestorList) -> uint { 0 } fn incr_generation(_ancestors: &AncestorList) -> uint { 0 }
@ -243,7 +243,7 @@ fn each_ancestor(list: &mut AncestorList,
// The map defaults to None, because if ancestors is None, we're at // The map defaults to None, because if ancestors is None, we're at
// the end of the list, which doesn't make sense to coalesce. // the end of the list, which doesn't make sense to coalesce.
do ancestors.map_default((None,false)) |ancestor_arc| { do ancestors.as_ref().map_default((None,false)) |ancestor_arc| {
// NB: Takes a lock! (this ancestor node) // NB: Takes a lock! (this ancestor node)
do access_ancestors(ancestor_arc) |nobe| { do access_ancestors(ancestor_arc) |nobe| {
// Argh, but we couldn't give it to coalesce() otherwise. // Argh, but we couldn't give it to coalesce() otherwise.
@ -386,7 +386,7 @@ fn enlist_in_taskgroup(state: TaskGroupInner, me: KillHandle,
is_member: bool) -> bool { is_member: bool) -> bool {
let me = Cell::new(me); // :( let me = Cell::new(me); // :(
// If 'None', the group was failing. Can't enlist. // If 'None', the group was failing. Can't enlist.
do state.map_mut_default(false) |group| { do state.as_mut().map_default(false) |group| {
(if is_member { (if is_member {
&mut group.members &mut group.members
} else { } else {
@ -400,7 +400,7 @@ fn enlist_in_taskgroup(state: TaskGroupInner, me: KillHandle,
fn leave_taskgroup(state: TaskGroupInner, me: &KillHandle, is_member: bool) { fn leave_taskgroup(state: TaskGroupInner, me: &KillHandle, is_member: bool) {
let me = Cell::new(me); // :( let me = Cell::new(me); // :(
// If 'None', already failing and we've already gotten a kill signal. // If 'None', already failing and we've already gotten a kill signal.
do state.map_mut |group| { do state.as_mut().map |group| {
(if is_member { (if is_member {
&mut group.members &mut group.members
} else { } else {
@ -414,7 +414,7 @@ fn kill_taskgroup(state: Option<TaskGroupData>, me: &KillHandle) {
// Might already be None, if somebody is failing simultaneously. // Might already be None, if somebody is failing simultaneously.
// That's ok; only one task needs to do the dirty work. (Might also // That's ok; only one task needs to do the dirty work. (Might also
// see 'None' if somebody already failed and we got a kill signal.) // see 'None' if somebody already failed and we got a kill signal.)
do state.map_move |TaskGroupData { members: members, descendants: descendants }| { do state.map |TaskGroupData { members: members, descendants: descendants }| {
for sibling in members.move_iter() { for sibling in members.move_iter() {
// Skip self - killing ourself won't do much good. // Skip self - killing ourself won't do much good.
if &sibling != me { if &sibling != me {
@ -439,7 +439,7 @@ fn taskgroup_key() -> local_data::Key<@@mut Taskgroup> {
struct RuntimeGlue; struct RuntimeGlue;
impl RuntimeGlue { impl RuntimeGlue {
fn kill_task(mut handle: KillHandle) { fn kill_task(mut handle: KillHandle) {
do handle.kill().map_move |killed_task| { do handle.kill().map |killed_task| {
let killed_task = Cell::new(killed_task); let killed_task = Cell::new(killed_task);
do Local::borrow |sched: &mut Scheduler| { do Local::borrow |sched: &mut Scheduler| {
sched.enqueue_task(killed_task.take()); sched.enqueue_task(killed_task.take());
@ -491,7 +491,7 @@ fn gen_child_taskgroup(linked: bool, supervised: bool)
// with_my_taskgroup will lazily initialize the parent's taskgroup if // with_my_taskgroup will lazily initialize the parent's taskgroup if
// it doesn't yet exist. We don't want to call it in the unlinked case. // it doesn't yet exist. We don't want to call it in the unlinked case.
do RuntimeGlue::with_my_taskgroup |spawner_group| { do RuntimeGlue::with_my_taskgroup |spawner_group| {
let ancestors = AncestorList(spawner_group.ancestors.map(|x| x.clone())); let ancestors = AncestorList(spawner_group.ancestors.as_ref().map(|x| x.clone()));
if linked { if linked {
// Child is in the same group as spawner. // Child is in the same group as spawner.
// Child's ancestors are spawner's ancestors. // Child's ancestors are spawner's ancestors.
@ -562,7 +562,7 @@ pub fn spawn_raw(mut opts: TaskOpts, f: ~fn()) {
// Child task runs this code. // Child task runs this code.
// If child data is 'None', the enlist is vacuously successful. // If child data is 'None', the enlist is vacuously successful.
let enlist_success = do child_data.take().map_move_default(true) |child_data| { let enlist_success = do child_data.take().map_default(true) |child_data| {
let child_data = Cell::new(child_data); // :( let child_data = Cell::new(child_data); // :(
do Local::borrow |me: &mut Task| { do Local::borrow |me: &mut Task| {
let (child_tg, ancestors) = child_data.take(); let (child_tg, ancestors) = child_data.take();
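
In the taskgroup code the removed `map_mut` and `map_mut_default` become `as_mut()` followed by the by-value `map`/`map_default`, so the closure can still mutate the group in place while `None` keeps meaning "already failing". A standalone sketch of that shape, in the Rust of this commit (a plain vector stands in for the real `TaskGroupInner`):

```rust
fn enlist(state: &mut Option<~[int]>, member: int) -> bool {
    // `None` means the group is failing; refuse to enlist.
    do state.as_mut().map_default(false) |members| {
        members.push(member);
        true
    }
}

fn main() {
    let mut group = Some(~[1, 2]);
    assert!(enlist(&mut group, 3));
    assert!(group == Some(~[1, 2, 3]));

    let mut failing: Option<~[int]> = None;
    assert!(!enlist(&mut failing, 4));
}
```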

View file

@ -488,7 +488,7 @@ pub struct TrieSetIterator<'self> {
impl<'self> Iterator<uint> for TrieSetIterator<'self> { impl<'self> Iterator<uint> for TrieSetIterator<'self> {
fn next(&mut self) -> Option<uint> { fn next(&mut self) -> Option<uint> {
do self.iter.next().map |&(key, _)| { key } do self.iter.next().map |(key, _)| { key }
} }
fn size_hint(&self) -> (uint, Option<uint>) { fn size_hint(&self) -> (uint, Option<uint>) {

View file

@ -588,7 +588,7 @@ impl<'self, O: IdVisitingOperation> Visitor<()> for IdVisitor<'self, O> {
id: NodeId, id: NodeId,
_: ()) { _: ()) {
self.operation.visit_id(id); self.operation.visit_id(id);
struct_def.ctor_id.map(|&ctor_id| self.operation.visit_id(ctor_id)); struct_def.ctor_id.map(|ctor_id| self.operation.visit_id(ctor_id));
visit::walk_struct_def(self, struct_def, ident, generics, id, ()); visit::walk_struct_def(self, struct_def, ident, generics, id, ());
} }
@ -783,7 +783,7 @@ pub fn new_sctable_internal() -> SCTable {
// fetch the SCTable from TLS, create one if it doesn't yet exist. // fetch the SCTable from TLS, create one if it doesn't yet exist.
pub fn get_sctable() -> @mut SCTable { pub fn get_sctable() -> @mut SCTable {
local_data_key!(sctable_key: @@mut SCTable) local_data_key!(sctable_key: @@mut SCTable)
match local_data::get(sctable_key, |k| k.map_move(|k| *k)) { match local_data::get(sctable_key, |k| k.map(|k| *k)) {
None => { None => {
let new_table = @@mut new_sctable_internal(); let new_table = @@mut new_sctable_internal();
local_data::set(sctable_key,new_table); local_data::set(sctable_key,new_table);
@ -820,7 +820,7 @@ pub type ResolveTable = HashMap<(Name,SyntaxContext),Name>;
// fetch the SCTable from TLS, create one if it doesn't yet exist. // fetch the SCTable from TLS, create one if it doesn't yet exist.
pub fn get_resolve_table() -> @mut ResolveTable { pub fn get_resolve_table() -> @mut ResolveTable {
local_data_key!(resolve_table_key: @@mut ResolveTable) local_data_key!(resolve_table_key: @@mut ResolveTable)
match local_data::get(resolve_table_key, |k| k.map(|&k| *k)) { match local_data::get(resolve_table_key, |k| k.map(|k| *k)) {
None => { None => {
let new_table = @@mut HashMap::new(); let new_table = @@mut HashMap::new();
local_data::set(resolve_table_key,new_table); local_data::set(resolve_table_key,new_table);

View file

@ -83,7 +83,7 @@ impl AttrMetaMethods for MetaItem {
} }
fn name_str_pair(&self) -> Option<(@str, @str)> { fn name_str_pair(&self) -> Option<(@str, @str)> {
self.value_str().map_move(|s| (self.name(), s)) self.value_str().map(|s| (self.name(), s))
} }
} }

View file

@ -198,7 +198,7 @@ fn print_maybe_styled(msg: &str, color: term::attr::Attr) {
let stderr = io::stderr(); let stderr = io::stderr();
if stderr.get_type() == io::Screen { if stderr.get_type() == io::Screen {
let t = match local_data::get(tls_terminal, |v| v.map_move(|k| *k)) { let t = match local_data::get(tls_terminal, |v| v.map(|k| *k)) {
None => { None => {
let t = term::Terminal::new(stderr); let t = term::Terminal::new(stderr);
let tls = @match t { let tls = @match t {
@ -337,7 +337,7 @@ fn highlight_lines(cm: @codemap::CodeMap,
fn print_macro_backtrace(cm: @codemap::CodeMap, sp: Span) { fn print_macro_backtrace(cm: @codemap::CodeMap, sp: Span) {
for ei in sp.expn_info.iter() { for ei in sp.expn_info.iter() {
let ss = ei.callee.span.map_default(~"", |span| cm.span_to_str(*span)); let ss = ei.callee.span.as_ref().map_default(~"", |span| cm.span_to_str(*span));
print_diagnostic(ss, note, print_diagnostic(ss, note,
format!("in expansion of {}!", ei.callee.name)); format!("in expansion of {}!", ei.callee.name));
let ss = cm.span_to_str(ei.call_site); let ss = cm.span_to_str(ei.call_site);

View file

@ -565,7 +565,7 @@ impl <K: Eq + Hash + IterBytes + 'static, V: 'static> MapChain<K,V>{
ConsMapChain(ref map,_) => map ConsMapChain(ref map,_) => map
}; };
// strip one layer of indirection off the pointer. // strip one layer of indirection off the pointer.
map.find(key).map_move(|r| {*r}) map.find(key).map(|r| {*r})
} }
// insert the binding into the top-level map // insert the binding into the top-level map

View file

@ -663,7 +663,7 @@ impl AstBuilder for @ExtCtxt {
fn expr_if(&self, span: Span, fn expr_if(&self, span: Span,
cond: @ast::Expr, then: @ast::Expr, els: Option<@ast::Expr>) -> @ast::Expr { cond: @ast::Expr, then: @ast::Expr, els: Option<@ast::Expr>) -> @ast::Expr {
let els = els.map_move(|x| self.expr_block(self.block_expr(x))); let els = els.map(|x| self.expr_block(self.block_expr(x)));
self.expr(span, ast::ExprIf(cond, self.block_expr(then), els)) self.expr(span, ast::ExprIf(cond, self.block_expr(then), els))
} }

View file

@ -249,7 +249,7 @@ pub fn get_explicit_self(cx: @ExtCtxt, span: Span, self_ptr: &Option<PtrTy>)
Send => ast::sty_uniq, Send => ast::sty_uniq,
Managed(mutbl) => ast::sty_box(mutbl), Managed(mutbl) => ast::sty_box(mutbl),
Borrowed(ref lt, mutbl) => { Borrowed(ref lt, mutbl) => {
let lt = lt.map(|s| cx.lifetime(span, cx.ident_of(*s))); let lt = lt.map(|s| cx.lifetime(span, cx.ident_of(s)));
ast::sty_region(lt, mutbl) ast::sty_region(lt, mutbl)
} }
}); });

View file

@ -582,7 +582,7 @@ fn expand_non_macro_stmt(exts: SyntaxEnv, s: &Stmt, fld: &MacroExpander)
// add them to the existing pending renames: // add them to the existing pending renames:
for pr in new_pending_renames.iter() {pending_renames.push(*pr)} for pr in new_pending_renames.iter() {pending_renames.push(*pr)}
// also, don't forget to expand the init: // also, don't forget to expand the init:
let new_init_opt = init.map(|e| fld.fold_expr(*e)); let new_init_opt = init.map(|e| fld.fold_expr(e));
let rewritten_local = let rewritten_local =
@Local { @Local {
is_mutbl: is_mutbl, is_mutbl: is_mutbl,
@ -725,7 +725,7 @@ pub fn expand_block_elts(exts: SyntaxEnv, b: &Block, fld: &MacroExpander)
None => () None => ()
} }
} }
let new_expr = b.expr.map(|x| fld.fold_expr(rename_fld.fold_expr(*x))); let new_expr = b.expr.map(|x| fld.fold_expr(rename_fld.fold_expr(x)));
Block{ Block{
view_items: new_view_items, view_items: new_view_items,
stmts: new_stmts, stmts: new_stmts,

View file

@ -266,7 +266,7 @@ impl Context {
} }
}; };
self.verify_same(span, ty, self.verify_same(span, ty,
self.name_types.find(&name).map(|&x| *x)); self.name_types.find(&name).map(|&x| x));
if !self.name_types.contains_key(&name) { if !self.name_types.contains_key(&name) {
self.name_types.insert(name, ty); self.name_types.insert(name, ty);
} }

View file

@ -165,7 +165,7 @@ pub trait ast_fold {
fn fold_arm(&self, a: &Arm) -> Arm { fn fold_arm(&self, a: &Arm) -> Arm {
Arm { Arm {
pats: a.pats.map(|x| self.fold_pat(*x)), pats: a.pats.map(|x| self.fold_pat(*x)),
guard: a.guard.map_move(|x| self.fold_expr(x)), guard: a.guard.map(|x| self.fold_expr(x)),
body: self.fold_block(&a.body), body: self.fold_block(&a.body),
} }
} }
@ -176,12 +176,12 @@ pub trait ast_fold {
PatIdent(binding_mode, ref pth, ref sub) => { PatIdent(binding_mode, ref pth, ref sub) => {
PatIdent(binding_mode, PatIdent(binding_mode,
self.fold_path(pth), self.fold_path(pth),
sub.map_move(|x| self.fold_pat(x))) sub.map(|x| self.fold_pat(x)))
} }
PatLit(e) => PatLit(self.fold_expr(e)), PatLit(e) => PatLit(self.fold_expr(e)),
PatEnum(ref pth, ref pats) => { PatEnum(ref pth, ref pats) => {
PatEnum(self.fold_path(pth), PatEnum(self.fold_path(pth),
pats.map(|pats| pats.map(|x| self.fold_pat(*x)))) pats.as_ref().map(|pats| pats.map(|x| self.fold_pat(*x))))
} }
PatStruct(ref pth, ref fields, etc) => { PatStruct(ref pth, ref fields, etc) => {
let pth_ = self.fold_path(pth); let pth_ = self.fold_path(pth);
@ -202,7 +202,7 @@ pub trait ast_fold {
}, },
PatVec(ref before, ref slice, ref after) => { PatVec(ref before, ref slice, ref after) => {
PatVec(before.map(|x| self.fold_pat(*x)), PatVec(before.map(|x| self.fold_pat(*x)),
slice.map_move(|x| self.fold_pat(x)), slice.map(|x| self.fold_pat(x)),
after.map(|x| self.fold_pat(*x))) after.map(|x| self.fold_pat(*x)))
} }
}; };
@ -225,7 +225,7 @@ pub trait ast_fold {
} }
}; };
node.map_move(|node| { node.map(|node| {
@Spanned { @Spanned {
node: node, node: node,
span: d.span, span: d.span,
@ -313,7 +313,7 @@ pub trait ast_fold {
kind = struct_variant_kind(@ast::struct_def { kind = struct_variant_kind(@ast::struct_def {
fields: struct_def.fields.iter() fields: struct_def.fields.iter()
.map(|f| self.fold_struct_field(*f)).collect(), .map(|f| self.fold_struct_field(*f)).collect(),
ctor_id: struct_def.ctor_id.map(|c| self.new_id(*c)) ctor_id: struct_def.ctor_id.map(|c| self.new_id(c))
}) })
} }
} }
@ -360,7 +360,7 @@ pub trait ast_fold {
is_mutbl: l.is_mutbl, is_mutbl: l.is_mutbl,
ty: self.fold_ty(&l.ty), ty: self.fold_ty(&l.ty),
pat: self.fold_pat(l.pat), pat: self.fold_pat(l.pat),
init: l.init.map_move(|e| self.fold_expr(e)), init: l.init.map(|e| self.fold_expr(e)),
id: self.new_id(l.id), id: self.new_id(l.id),
span: self.new_span(l.span), span: self.new_span(l.span),
} }
@ -445,7 +445,7 @@ pub fn fold_tts<T:ast_fold>(tts: &[token_tree], fld: &T) -> ~[token_tree] {
tt_seq(span, ref pattern, ref sep, is_optional) => tt_seq(span, ref pattern, ref sep, is_optional) =>
tt_seq(span, tt_seq(span,
@mut fold_tts(**pattern, fld), @mut fold_tts(**pattern, fld),
sep.map(|tok|maybe_fold_ident(tok,fld)), sep.as_ref().map(|tok|maybe_fold_ident(tok,fld)),
is_optional), is_optional),
tt_nonterminal(sp,ref ident) => tt_nonterminal(sp,ref ident) =>
tt_nonterminal(sp,fld.fold_ident(*ident)) tt_nonterminal(sp,fld.fold_ident(*ident))
@ -515,7 +515,7 @@ fn fold_struct_def<T:ast_fold>(struct_def: @ast::struct_def, fld: &T)
-> @ast::struct_def { -> @ast::struct_def {
@ast::struct_def { @ast::struct_def {
fields: struct_def.fields.map(|f| fold_struct_field(*f, fld)), fields: struct_def.fields.map(|f| fold_struct_field(*f, fld)),
ctor_id: struct_def.ctor_id.map(|cid| fld.new_id(*cid)), ctor_id: struct_def.ctor_id.map(|cid| fld.new_id(cid)),
} }
} }
@ -577,7 +577,7 @@ fn fold_field<T:ast_fold>(f: TypeField, folder: &T) -> TypeField {
fn fold_opt_bounds<T:ast_fold>(b: &Option<OptVec<TyParamBound>>, folder: &T) fn fold_opt_bounds<T:ast_fold>(b: &Option<OptVec<TyParamBound>>, folder: &T)
-> Option<OptVec<TyParamBound>> { -> Option<OptVec<TyParamBound>> {
do b.map |bounds| { do b.as_ref().map |bounds| {
do bounds.map |bound| { do bounds.map |bound| {
fold_ty_param_bound(bound, folder) fold_ty_param_bound(bound, folder)
} }
@ -604,7 +604,7 @@ pub fn noop_fold_block<T:ast_fold>(b: &Block, folder: &T) -> Block {
ast::Block { ast::Block {
view_items: view_items, view_items: view_items,
stmts: stmts, stmts: stmts,
expr: b.expr.map(|x| folder.fold_expr(*x)), expr: b.expr.map(|x| folder.fold_expr(x)),
id: folder.new_id(b.id), id: folder.new_id(b.id),
rules: b.rules, rules: b.rules,
span: folder.new_span(b.span), span: folder.new_span(b.span),
@ -648,7 +648,7 @@ pub fn noop_fold_item_underscore<T:ast_fold>(i: &item_, folder: &T) -> item_ {
} }
item_impl(ref generics, ref ifce, ref ty, ref methods) => { item_impl(ref generics, ref ifce, ref ty, ref methods) => {
item_impl(fold_generics(generics, folder), item_impl(fold_generics(generics, folder),
ifce.map(|p| fold_trait_ref(p, folder)), ifce.as_ref().map(|p| fold_trait_ref(p, folder)),
folder.fold_ty(ty), folder.fold_ty(ty),
methods.map(|x| folder.fold_method(*x)) methods.map(|x| folder.fold_method(*x))
) )
@ -764,7 +764,7 @@ pub fn noop_fold_expr<T:ast_fold>(e: @ast::Expr, folder: &T) -> @ast::Expr {
ExprIf(cond, ref tr, fl) => { ExprIf(cond, ref tr, fl) => {
ExprIf(folder.fold_expr(cond), ExprIf(folder.fold_expr(cond),
folder.fold_block(tr), folder.fold_block(tr),
fl.map_move(|x| folder.fold_expr(x))) fl.map(|x| folder.fold_expr(x)))
} }
ExprWhile(cond, ref body) => { ExprWhile(cond, ref body) => {
ExprWhile(folder.fold_expr(cond), folder.fold_block(body)) ExprWhile(folder.fold_expr(cond), folder.fold_block(body))
@ -773,11 +773,11 @@ pub fn noop_fold_expr<T:ast_fold>(e: @ast::Expr, folder: &T) -> @ast::Expr {
ExprForLoop(folder.fold_pat(pat), ExprForLoop(folder.fold_pat(pat),
folder.fold_expr(iter), folder.fold_expr(iter),
folder.fold_block(body), folder.fold_block(body),
maybe_ident.map_move(|i| folder.fold_ident(i))) maybe_ident.map(|i| folder.fold_ident(i)))
} }
ExprLoop(ref body, opt_ident) => { ExprLoop(ref body, opt_ident) => {
ExprLoop(folder.fold_block(body), ExprLoop(folder.fold_block(body),
opt_ident.map_move(|x| folder.fold_ident(x))) opt_ident.map(|x| folder.fold_ident(x)))
} }
ExprMatch(expr, ref arms) => { ExprMatch(expr, ref arms) => {
ExprMatch(folder.fold_expr(expr), ExprMatch(folder.fold_expr(expr),
@ -814,7 +814,7 @@ pub fn noop_fold_expr<T:ast_fold>(e: @ast::Expr, folder: &T) -> @ast::Expr {
ExprBreak(opt_ident) => ExprBreak(opt_ident), ExprBreak(opt_ident) => ExprBreak(opt_ident),
ExprAgain(opt_ident) => ExprAgain(opt_ident), ExprAgain(opt_ident) => ExprAgain(opt_ident),
ExprRet(ref e) => { ExprRet(ref e) => {
ExprRet(e.map_move(|x| folder.fold_expr(x))) ExprRet(e.map(|x| folder.fold_expr(x)))
} }
ExprInlineAsm(ref a) => { ExprInlineAsm(ref a) => {
ExprInlineAsm(inline_asm { ExprInlineAsm(inline_asm {
@ -827,7 +827,7 @@ pub fn noop_fold_expr<T:ast_fold>(e: @ast::Expr, folder: &T) -> @ast::Expr {
ExprStruct(ref path, ref fields, maybe_expr) => { ExprStruct(ref path, ref fields, maybe_expr) => {
ExprStruct(folder.fold_path(path), ExprStruct(folder.fold_path(path),
fields.map(|x| fold_field(*x)), fields.map(|x| fold_field(*x)),
maybe_expr.map_move(|x| folder.fold_expr(x))) maybe_expr.map(|x| folder.fold_expr(x)))
}, },
ExprParen(ex) => ExprParen(folder.fold_expr(ex)) ExprParen(ex) => ExprParen(folder.fold_expr(ex))
}; };
@ -856,7 +856,7 @@ pub fn noop_fold_stmt<T:ast_fold>(s: &Stmt, folder: &T) -> Option<@Stmt> {
StmtMac(ref mac, semi) => Some(StmtMac(folder.fold_mac(mac), semi)) StmtMac(ref mac, semi) => Some(StmtMac(folder.fold_mac(mac), semi))
}; };
node.map_move(|node| @Spanned { node.map(|node| @Spanned {
node: node, node: node,
span: folder.new_span(s.span), span: folder.new_span(s.span),
}) })
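
In the folder the options hold managed AST nodes, so the old split between `map` (by reference, with a deref) and `map_move` collapses into the one by-value `map`. A standalone sketch with an `@int` standing in for an `@Expr`, in the Rust of this commit (`fold_expr` here is illustrative, not the real folder):

```rust
fn fold_expr(e: @int) -> @int { @(*e + 1) }

fn main() {
    let init: Option<@int> = Some(@41);

    // old: init.map(|x| fold_expr(*x)) or init.map_move(|x| fold_expr(x));
    // now there is just one `map`, taking the node by value.
    let folded = init.map(|x| fold_expr(x));
    assert!(*folded.unwrap() == 42);
}
```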

View file

@ -405,7 +405,7 @@ impl Parser {
fn tokens_to_str(p:&Parser, tokens: &[token::Token]) -> ~str { fn tokens_to_str(p:&Parser, tokens: &[token::Token]) -> ~str {
let mut i = tokens.iter(); let mut i = tokens.iter();
// This might be a sign we need a connect method on Iterator. // This might be a sign we need a connect method on Iterator.
let b = i.next().map_default(~"", |t| p.token_to_str(*t)); let b = i.next().map_default(~"", |t| p.token_to_str(t));
i.fold(b, |b,a| b + "`, `" + p.token_to_str(a)) i.fold(b, |b,a| b + "`, `" + p.token_to_str(a))
} }
if edible.contains(self.token) { if edible.contains(self.token) {
@ -470,7 +470,7 @@ impl Parser {
pub fn commit_stmt(&self, s: @Stmt, edible: &[token::Token], inedible: &[token::Token]) { pub fn commit_stmt(&self, s: @Stmt, edible: &[token::Token], inedible: &[token::Token]) {
debug2!("commit_stmt {:?}", s); debug2!("commit_stmt {:?}", s);
let _s = s; // unused, but future checks might want to inspect `s`. let _s = s; // unused, but future checks might want to inspect `s`.
if self.last_token.map_default(false, |t|is_ident_or_path(*t)) { if self.last_token.as_ref().map_default(false, |t| is_ident_or_path(*t)) {
let expected = vec::append(edible.to_owned(), inedible); let expected = vec::append(edible.to_owned(), inedible);
self.check_for_erroneous_unit_struct_expecting(expected); self.check_for_erroneous_unit_struct_expecting(expected);
} }

View file

@ -510,7 +510,7 @@ static RESERVED_KEYWORD_FINAL: uint = 71;
// fresh one. // fresh one.
pub fn get_ident_interner() -> @ident_interner { pub fn get_ident_interner() -> @ident_interner {
local_data_key!(key: @@::parse::token::ident_interner) local_data_key!(key: @@::parse::token::ident_interner)
match local_data::get(key, |k| k.map_move(|k| *k)) { match local_data::get(key, |k| k.map(|k| *k)) {
Some(interner) => *interner, Some(interner) => *interner,
None => { None => {
let interner = mk_fresh_ident_interner(); let interner = mk_fresh_ident_interner();

View file

@ -1978,7 +1978,7 @@ pub fn print_ty_fn(s: @ps,
print_onceness(s, onceness); print_onceness(s, onceness);
word(s.s, "fn"); word(s.s, "fn");
match id { Some(id) => { word(s.s, " "); print_ident(s, id); } _ => () } match id { Some(id) => { word(s.s, " "); print_ident(s, id); } _ => () }
do opt_bounds.map |bounds| { print_bounds(s, bounds, true); }; do opt_bounds.as_ref().map |bounds| { print_bounds(s, bounds, true); };
match generics { Some(g) => print_generics(s, g), _ => () } match generics { Some(g) => print_generics(s, g), _ => () }
zerobreak(s.s); zerobreak(s.s);