auto merge of #8294 : erickt/rust/map-move, r=bblum
According to #7887, we've decided to use the syntax `fn map<U>(f: &fn(&T) -> U) -> U`, which passes a reference into the closure, and `fn map_move<U>(f: &fn(T) -> U) -> U`, which moves the value into the closure. This PR adds these `.map_move()` functions to `Option` and `Result`. In addition, it makes these other minor changes:

* Replaces a couple of uses of `option.get()`, `result.get()`, and `result.get_err()` with `option.unwrap()`, `result.unwrap()`, and `result.unwrap_err()`. (See #8268 and #8288 for a more thorough adaptation of this functionality.)
* Removes `option.take_map()` and `option.take_map_default()`. These two functions can easily be written as `.take().map_move(...)`.
* Adds a better error message to `result.unwrap()` and `result.unwrap_err()`.
This commit is contained in: commit 98ec79c957
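For orientation before the diff: below is a minimal, hypothetical sketch of the naming convention described above, written in modern Rust (where the consuming form is plain `Option::map`/`Result::map`); the comments note how the 2013-era names from this PR line up. The code is illustrative only and does not appear in the commit.

```rust
// Sketch only (modern Rust, not from the commit).
// Rough mapping to the 2013-era names introduced by this PR:
//   opt.map(|r| ...)       borrowed the contents  -> today: opt.as_ref().map(|r| ...)
//   opt.map_move(|v| ...)  consumed the contents  -> today: opt.map(|v| ...)
//   opt.take_map(f)        removed by this PR     -> spelled opt.take().map(f)
fn main() {
    let name: Option<String> = Some(String::from("rust"));

    // By-reference mapping: the closure sees &String, so `name` stays usable.
    let len = name.as_ref().map(|s| s.len());
    assert_eq!(len, Some(4));

    // By-value ("move") mapping: the closure takes ownership of the String.
    let shouted = name.map(|s| s.to_uppercase());
    assert_eq!(shouted.as_deref(), Some("RUST"));

    // The removed take_map pattern: take the value out of a mutable Option, then map it.
    let mut slot: Option<String> = Some(String::from("once"));
    let taken = slot.take().map(|s| s + "!");
    assert_eq!(taken.as_deref(), Some("once!"));
    assert!(slot.is_none());
}
```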
@@ -109,8 +109,8 @@ pub fn parse_config(args: ~[~str]) -> config {
         compile_lib_path: getopts::opt_str(matches, "compile-lib-path"),
         run_lib_path: getopts::opt_str(matches, "run-lib-path"),
         rustc_path: opt_path(matches, "rustc-path"),
-        clang_path: getopts::opt_maybe_str(matches, "clang-path").map(|s| Path(*s)),
-        llvm_bin_path: getopts::opt_maybe_str(matches, "llvm-bin-path").map(|s| Path(*s)),
+        clang_path: getopts::opt_maybe_str(matches, "clang-path").map_move(|s| Path(s)),
+        llvm_bin_path: getopts::opt_maybe_str(matches, "llvm-bin-path").map_move(|s| Path(s)),
         src_base: opt_path(matches, "src-base"),
         build_base: opt_path(matches, "build-base"),
         aux_base: opt_path(matches, "aux-base"),
@@ -123,14 +123,14 @@ pub fn parse_config(args: ~[~str]) -> config {
         } else {
             None
         },
-        logfile: getopts::opt_maybe_str(matches, "logfile").map(|s| Path(*s)),
-        save_metrics: getopts::opt_maybe_str(matches, "save-metrics").map(|s| Path(*s)),
+        logfile: getopts::opt_maybe_str(matches, "logfile").map_move(|s| Path(s)),
+        save_metrics: getopts::opt_maybe_str(matches, "save-metrics").map_move(|s| Path(s)),
         ratchet_metrics:
-            getopts::opt_maybe_str(matches, "ratchet-metrics").map(|s| Path(*s)),
+            getopts::opt_maybe_str(matches, "ratchet-metrics").map_move(|s| Path(s)),
         ratchet_noise_percent:
             getopts::opt_maybe_str(matches,
-                                   "ratchet-noise-percent").map(|s|
-                                                                f64::from_str(*s).unwrap()),
+                                   "ratchet-noise-percent").map_move(|s|
+                                                                     f64::from_str(s).unwrap()),
         runtool: getopts::opt_maybe_str(matches, "runtool"),
         rustcflags: getopts::opt_maybe_str(matches, "rustcflags"),
         jit: getopts::opt_present(matches, "jit"),
@@ -162,9 +162,8 @@ fn run_pretty_test(config: &config, props: &TestProps, testfile: &Path) {
         round += 1;
     }

-    let mut expected =
-        match props.pp_exact {
-            Some(ref file) => {
+    let mut expected = match props.pp_exact {
+        Some(ref file) => {
             let filepath = testfile.dir_path().push_rel(file);
             io::read_whole_file_str(&filepath).unwrap()
         }
@ -164,7 +164,7 @@ impl<T> DList<T> {
|
||||
/// Remove the first Node and return it, or None if the list is empty
|
||||
#[inline]
|
||||
fn pop_front_node(&mut self) -> Option<~Node<T>> {
|
||||
do self.list_head.take().map_consume |mut front_node| {
|
||||
do self.list_head.take().map_move |mut front_node| {
|
||||
self.length -= 1;
|
||||
match front_node.next.take() {
|
||||
Some(node) => self.list_head = link_with_prev(node, Rawlink::none()),
|
||||
@ -190,7 +190,7 @@ impl<T> DList<T> {
|
||||
/// Remove the last Node and return it, or None if the list is empty
|
||||
#[inline]
|
||||
fn pop_back_node(&mut self) -> Option<~Node<T>> {
|
||||
do self.list_tail.resolve().map_consume_default(None) |tail| {
|
||||
do self.list_tail.resolve().map_move_default(None) |tail| {
|
||||
self.length -= 1;
|
||||
self.list_tail = tail.prev;
|
||||
match tail.prev.resolve() {
|
||||
@ -237,7 +237,7 @@ impl<T> Deque<T> for DList<T> {
|
||||
///
|
||||
/// O(1)
|
||||
fn pop_front(&mut self) -> Option<T> {
|
||||
self.pop_front_node().map_consume(|~Node{value, _}| value)
|
||||
self.pop_front_node().map_move(|~Node{value, _}| value)
|
||||
}
|
||||
|
||||
/// Add an element last in the list
|
||||
@ -251,7 +251,7 @@ impl<T> Deque<T> for DList<T> {
|
||||
///
|
||||
/// O(1)
|
||||
fn pop_back(&mut self) -> Option<T> {
|
||||
self.pop_back_node().map_consume(|~Node{value, _}| value)
|
||||
self.pop_back_node().map_move(|~Node{value, _}| value)
|
||||
}
|
||||
}
|
||||
|
||||
@ -267,7 +267,7 @@ impl<T> DList<T> {
|
||||
/// If the list is empty, do nothing.
|
||||
#[inline]
|
||||
pub fn rotate_forward(&mut self) {
|
||||
do self.pop_back_node().map_consume |tail| {
|
||||
do self.pop_back_node().map_move |tail| {
|
||||
self.push_front_node(tail)
|
||||
};
|
||||
}
|
||||
@ -277,7 +277,7 @@ impl<T> DList<T> {
|
||||
/// If the list is empty, do nothing.
|
||||
#[inline]
|
||||
pub fn rotate_backward(&mut self) {
|
||||
do self.pop_front_node().map_consume |head| {
|
||||
do self.pop_front_node().map_move |head| {
|
||||
self.push_back_node(head)
|
||||
};
|
||||
}
|
||||
@ -463,7 +463,7 @@ impl<'self, A> DoubleEndedIterator<&'self A> for DListIterator<'self, A> {
|
||||
if self.nelem == 0 {
|
||||
return None;
|
||||
}
|
||||
do self.tail.resolve().map_consume |prev| {
|
||||
do self.tail.resolve().map_move |prev| {
|
||||
self.nelem -= 1;
|
||||
self.tail = prev.prev;
|
||||
&prev.value
|
||||
@ -477,7 +477,7 @@ impl<'self, A> Iterator<&'self mut A> for MutDListIterator<'self, A> {
|
||||
if self.nelem == 0 {
|
||||
return None;
|
||||
}
|
||||
do self.head.resolve().map_consume |next| {
|
||||
do self.head.resolve().map_move |next| {
|
||||
self.nelem -= 1;
|
||||
self.head = match next.next {
|
||||
Some(ref mut node) => Rawlink::some(&mut **node),
|
||||
@ -499,7 +499,7 @@ impl<'self, A> DoubleEndedIterator<&'self mut A> for MutDListIterator<'self, A>
|
||||
if self.nelem == 0 {
|
||||
return None;
|
||||
}
|
||||
do self.tail.resolve().map_consume |prev| {
|
||||
do self.tail.resolve().map_move |prev| {
|
||||
self.nelem -= 1;
|
||||
self.tail = prev.prev;
|
||||
&mut prev.value
|
||||
@ -553,7 +553,7 @@ impl<'self, A> ListInsertion<A> for MutDListIterator<'self, A> {
|
||||
if self.nelem == 0 {
|
||||
return None
|
||||
}
|
||||
self.head.resolve().map_consume(|head| &mut head.value)
|
||||
self.head.resolve().map_move(|head| &mut head.value)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -548,7 +548,7 @@ impl BigUint {
|
||||
|
||||
pub fn new(v: ~[BigDigit]) -> BigUint {
|
||||
// omit trailing zeros
|
||||
let new_len = v.rposition(|n| *n != 0).map_default(0, |p| *p + 1);
|
||||
let new_len = v.rposition(|n| *n != 0).map_move_default(0, |p| p + 1);
|
||||
|
||||
if new_len == v.len() { return BigUint { data: v }; }
|
||||
let mut v = v;
|
||||
@ -1145,7 +1145,7 @@ impl BigInt {
|
||||
start = 1;
|
||||
}
|
||||
return BigUint::parse_bytes(buf.slice(start, buf.len()), radix)
|
||||
.map_consume(|bu| BigInt::from_biguint(sign, bu));
|
||||
.map_move(|bu| BigInt::from_biguint(sign, bu));
|
||||
}
|
||||
|
||||
pub fn to_uint(&self) -> uint {
|
||||
@ -2028,7 +2028,7 @@ mod bigint_tests {
|
||||
#[test]
|
||||
fn test_from_str_radix() {
|
||||
fn check(s: &str, ans: Option<int>) {
|
||||
let ans = ans.map(|&n| IntConvertible::from_int::<BigInt>(n));
|
||||
let ans = ans.map_move(|n| IntConvertible::from_int::<BigInt>(n));
|
||||
assert_eq!(FromStrRadix::from_str_radix(s, 10), ans);
|
||||
}
|
||||
check("10", Some(10));
|
||||
|
@ -203,7 +203,7 @@ impl<V> SmallIntMap<V> {
|
||||
{
|
||||
let values = replace(&mut self.v, ~[]);
|
||||
values.consume_iter().enumerate().filter_map(|(i, v)| {
|
||||
v.map_consume(|v| (i, v))
|
||||
v.map_move(|v| (i, v))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@ -127,7 +127,7 @@ impl Terminal {
|
||||
let inf = ti.unwrap();
|
||||
let nc = if inf.strings.find_equiv(&("setaf")).is_some()
|
||||
&& inf.strings.find_equiv(&("setab")).is_some() {
|
||||
inf.numbers.find_equiv(&("colors")).map_consume_default(0, |&n| n)
|
||||
inf.numbers.find_equiv(&("colors")).map_move_default(0, |&n| n)
|
||||
} else { 0 };
|
||||
|
||||
return Ok(Terminal {out: out, ti: inf, num_colors: nc});
|
||||
@ -220,7 +220,7 @@ impl Terminal {
|
||||
cap = self.ti.strings.find_equiv(&("op"));
|
||||
}
|
||||
}
|
||||
let s = do cap.map_consume_default(Err(~"can't find terminfo capability `sgr0`")) |op| {
|
||||
let s = do cap.map_move_default(Err(~"can't find terminfo capability `sgr0`")) |op| {
|
||||
expand(*op, [], &mut Variables::new())
|
||||
};
|
||||
if s.is_ok() {
|
||||
|
@ -238,20 +238,20 @@ pub fn parse_opts(args: &[~str]) -> OptRes {
|
||||
let run_ignored = getopts::opt_present(&matches, "ignored");
|
||||
|
||||
let logfile = getopts::opt_maybe_str(&matches, "logfile");
|
||||
let logfile = logfile.map(|s| Path(*s));
|
||||
let logfile = logfile.map_move(|s| Path(s));
|
||||
|
||||
let run_benchmarks = getopts::opt_present(&matches, "bench");
|
||||
let run_tests = ! run_benchmarks ||
|
||||
getopts::opt_present(&matches, "test");
|
||||
|
||||
let ratchet_metrics = getopts::opt_maybe_str(&matches, "ratchet-metrics");
|
||||
let ratchet_metrics = ratchet_metrics.map(|s| Path(*s));
|
||||
let ratchet_metrics = ratchet_metrics.map_move(|s| Path(s));
|
||||
|
||||
let ratchet_noise_percent = getopts::opt_maybe_str(&matches, "ratchet-noise-percent");
|
||||
let ratchet_noise_percent = ratchet_noise_percent.map(|s| f64::from_str(*s).unwrap());
|
||||
let ratchet_noise_percent = ratchet_noise_percent.map_move(|s| f64::from_str(s).unwrap());
|
||||
|
||||
let save_metrics = getopts::opt_maybe_str(&matches, "save-metrics");
|
||||
let save_metrics = save_metrics.map(|s| Path(*s));
|
||||
let save_metrics = save_metrics.map_move(|s| Path(s));
|
||||
|
||||
let test_opts = TestOpts {
|
||||
filter: filter,
|
||||
|
@ -394,7 +394,7 @@ impl<'self, T> Iterator<&'self T> for TreeSetIterator<'self, T> {
|
||||
/// Advance the iterator to the next node (in order). If there are no more nodes, return `None`.
|
||||
#[inline]
|
||||
fn next(&mut self) -> Option<&'self T> {
|
||||
do self.iter.next().map |&(value, _)| { value }
|
||||
do self.iter.next().map_move |(value, _)| { value }
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -221,7 +221,7 @@ fn digest<T:Encodable<json::Encoder>>(t: &T) -> ~str {
|
||||
fn digest_file(path: &Path) -> ~str {
|
||||
let mut sha = ~Sha1::new();
|
||||
let s = io::read_whole_file_str(path);
|
||||
(*sha).input_str(*s.get_ref());
|
||||
(*sha).input_str(s.unwrap());
|
||||
(*sha).result_str()
|
||||
}
|
||||
|
||||
@ -378,7 +378,7 @@ fn test() {
|
||||
let pth = Path("foo.c");
|
||||
{
|
||||
let r = io::file_writer(&pth, [io::Create]);
|
||||
r.get_ref().write_str("int main() { return 0; }");
|
||||
r.unwrap().write_str("int main() { return 0; }");
|
||||
}
|
||||
|
||||
let cx = Context::new(RWArc::new(Database::new(Path("db.json"))),
|
||||
|
@ -130,7 +130,7 @@ fn rustc_help() {
|
||||
fn find_cmd(command_string: &str) -> Option<Command> {
|
||||
do COMMANDS.iter().find_ |command| {
|
||||
command.cmd == command_string
|
||||
}.map_consume(|x| *x)
|
||||
}.map_move(|x| *x)
|
||||
}
|
||||
|
||||
fn cmd_help(args: &[~str]) -> ValidUsage {
|
||||
|
@ -669,8 +669,7 @@ pub fn build_session_options(binary: @str,
|
||||
} else if opt_present(matches, "emit-llvm") {
|
||||
link::output_type_bitcode
|
||||
} else { link::output_type_exe };
|
||||
let sysroot_opt = getopts::opt_maybe_str(matches, "sysroot");
|
||||
let sysroot_opt = sysroot_opt.map(|m| @Path(*m));
|
||||
let sysroot_opt = getopts::opt_maybe_str(matches, "sysroot").map_move(|m| @Path(m));
|
||||
let target_opt = getopts::opt_maybe_str(matches, "target");
|
||||
let target_feature_opt = getopts::opt_maybe_str(matches, "target-feature");
|
||||
let save_temps = getopts::opt_present(matches, "save-temps");
|
||||
|
@ -61,7 +61,9 @@ fn fold_mod(cx: @Context, m: &ast::_mod, fld: @fold::ast_fold) -> ast::_mod {
|
||||
filter_item(cx, *a).chain(|x| fld.fold_item(x))
|
||||
}.collect();
|
||||
let filtered_view_items = do m.view_items.iter().filter_map |a| {
|
||||
filter_view_item(cx, a).map(|&x| fld.fold_view_item(x))
|
||||
do filter_view_item(cx, a).map_move |x| {
|
||||
fld.fold_view_item(x)
|
||||
}
|
||||
}.collect();
|
||||
ast::_mod {
|
||||
view_items: filtered_view_items,
|
||||
@ -83,7 +85,9 @@ fn fold_foreign_mod(
|
||||
) -> ast::foreign_mod {
|
||||
let filtered_items = nm.items.iter().filter_map(|a| filter_foreign_item(cx, *a)).collect();
|
||||
let filtered_view_items = do nm.view_items.iter().filter_map |a| {
|
||||
filter_view_item(cx, a).map(|&x| fld.fold_view_item(x))
|
||||
do filter_view_item(cx, a).map_move |x| {
|
||||
fld.fold_view_item(x)
|
||||
}
|
||||
}.collect();
|
||||
ast::foreign_mod {
|
||||
sort: nm.sort,
|
||||
@ -138,7 +142,7 @@ fn fold_block(
|
||||
filter_stmt(cx, *a).chain(|stmt| fld.fold_stmt(stmt))
|
||||
}.collect();
|
||||
let filtered_view_items = do b.view_items.iter().filter_map |a| {
|
||||
filter_view_item(cx, a).map(|&x| fld.fold_view_item(x))
|
||||
filter_view_item(cx, a).map(|x| fld.fold_view_item(*x))
|
||||
}.collect();
|
||||
ast::Block {
|
||||
view_items: filtered_view_items,
|
||||
|
@ -2159,7 +2159,7 @@ impl TypeNames {
|
||||
}
|
||||
|
||||
pub fn find_type(&self, s: &str) -> Option<Type> {
|
||||
self.named_types.find_equiv(&s).map_consume(|x| Type::from_ref(*x))
|
||||
self.named_types.find_equiv(&s).map_move(|x| Type::from_ref(*x))
|
||||
}
|
||||
|
||||
// We have a depth count, because we seem to make infinite types.
|
||||
|
@ -133,7 +133,7 @@ pub fn add_extern_mod_stmt_cnum(cstore: &mut CStore,
|
||||
pub fn find_extern_mod_stmt_cnum(cstore: &CStore,
|
||||
emod_id: ast::NodeId)
|
||||
-> Option<ast::CrateNum> {
|
||||
cstore.extern_mod_crate_map.find(&emod_id).map_consume(|x| *x)
|
||||
cstore.extern_mod_crate_map.find(&emod_id).map_move(|x| *x)
|
||||
}
|
||||
|
||||
#[deriving(Clone)]
|
||||
|
@ -198,8 +198,8 @@ fn item_def_id(d: ebml::Doc, cdata: cmd) -> ast::def_id {
|
||||
}
|
||||
|
||||
fn get_provided_source(d: ebml::Doc, cdata: cmd) -> Option<ast::def_id> {
|
||||
do reader::maybe_get_doc(d, tag_item_method_provided_source).map |doc| {
|
||||
translate_def_id(cdata, reader::with_doc_data(*doc, parse_def_id))
|
||||
do reader::maybe_get_doc(d, tag_item_method_provided_source).map_move |doc| {
|
||||
translate_def_id(cdata, reader::with_doc_data(doc, parse_def_id))
|
||||
}
|
||||
}
|
||||
|
||||
@ -265,10 +265,10 @@ fn item_ty_param_defs(item: ebml::Doc, tcx: ty::ctxt, cdata: cmd,
|
||||
}
|
||||
|
||||
fn item_ty_region_param(item: ebml::Doc) -> Option<ty::region_variance> {
|
||||
reader::maybe_get_doc(item, tag_region_param).map(|doc| {
|
||||
let mut decoder = reader::Decoder(*doc);
|
||||
do reader::maybe_get_doc(item, tag_region_param).map_move |doc| {
|
||||
let mut decoder = reader::Decoder(doc);
|
||||
Decodable::decode(&mut decoder)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn item_ty_param_count(item: ebml::Doc) -> uint {
|
||||
@ -415,7 +415,7 @@ pub fn get_impl_trait(cdata: cmd,
|
||||
tcx: ty::ctxt) -> Option<@ty::TraitRef>
|
||||
{
|
||||
let item_doc = lookup_item(id, cdata.data);
|
||||
do reader::maybe_get_doc(item_doc, tag_item_trait_ref).map |&tp| {
|
||||
do reader::maybe_get_doc(item_doc, tag_item_trait_ref).map_move |tp| {
|
||||
@doc_trait_ref(tp, tcx, cdata)
|
||||
}
|
||||
}
|
||||
|
@ -286,13 +286,15 @@ pub fn opt_loan_path(cmt: mc::cmt) -> Option<@LoanPath> {
|
||||
}
|
||||
|
||||
mc::cat_deref(cmt_base, _, _) => {
|
||||
opt_loan_path(cmt_base).map(
|
||||
|&lp| @LpExtend(lp, cmt.mutbl, LpDeref))
|
||||
do opt_loan_path(cmt_base).map_move |lp| {
|
||||
@LpExtend(lp, cmt.mutbl, LpDeref)
|
||||
}
|
||||
}
|
||||
|
||||
mc::cat_interior(cmt_base, ik) => {
|
||||
opt_loan_path(cmt_base).map(
|
||||
|&lp| @LpExtend(lp, cmt.mutbl, LpInterior(ik)))
|
||||
do opt_loan_path(cmt_base).map_move |lp| {
|
||||
@LpExtend(lp, cmt.mutbl, LpInterior(ik))
|
||||
}
|
||||
}
|
||||
|
||||
mc::cat_downcast(cmt_base) |
|
||||
|
@ -493,9 +493,9 @@ pub fn compare_lit_exprs(tcx: middle::ty::ctxt, a: &expr, b: &expr) -> Option<in
|
||||
}
|
||||
|
||||
pub fn lit_expr_eq(tcx: middle::ty::ctxt, a: &expr, b: &expr) -> Option<bool> {
|
||||
compare_lit_exprs(tcx, a, b).map(|&val| val == 0)
|
||||
compare_lit_exprs(tcx, a, b).map_move(|val| val == 0)
|
||||
}
|
||||
|
||||
pub fn lit_eq(a: &lit, b: &lit) -> Option<bool> {
|
||||
compare_const_vals(&lit_to_const(a), &lit_to_const(b)).map(|&val| val == 0)
|
||||
compare_const_vals(&lit_to_const(a), &lit_to_const(b)).map_move(|val| val == 0)
|
||||
}
|
||||
|
@ -393,7 +393,7 @@ impl<'self> LanguageItemCollector<'self> {
|
||||
return; // Didn't match.
|
||||
}
|
||||
|
||||
let item_index = self.item_refs.find(&value).map(|x| **x);
|
||||
let item_index = self.item_refs.find(&value).map_move(|x| *x);
|
||||
// prevent borrow checker from considering ^~~~~~~~~~~
|
||||
// self to be borrowed (annoying)
|
||||
|
||||
|
@ -607,9 +607,9 @@ impl Liveness {
|
||||
match expr.node {
|
||||
expr_path(_) => {
|
||||
let def = self.tcx.def_map.get_copy(&expr.id);
|
||||
moves::moved_variable_node_id_from_def(def).map(
|
||||
|rdef| self.variable(*rdef, expr.span)
|
||||
)
|
||||
do moves::moved_variable_node_id_from_def(def).map_move |rdef| {
|
||||
self.variable(rdef, expr.span)
|
||||
}
|
||||
}
|
||||
_ => None
|
||||
}
|
||||
@ -623,9 +623,9 @@ impl Liveness {
|
||||
-> Option<Variable> {
|
||||
match self.tcx.def_map.find(&node_id) {
|
||||
Some(&def) => {
|
||||
moves::moved_variable_node_id_from_def(def).map(
|
||||
|rdef| self.variable(*rdef, span)
|
||||
)
|
||||
do moves::moved_variable_node_id_from_def(def).map_move |rdef| {
|
||||
self.variable(rdef, span)
|
||||
}
|
||||
}
|
||||
None => {
|
||||
self.tcx.sess.span_bug(
|
||||
|
@ -111,7 +111,7 @@ impl RegionMaps {
|
||||
pub fn opt_encl_scope(&self, id: ast::NodeId) -> Option<ast::NodeId> {
|
||||
//! Returns the narrowest scope that encloses `id`, if any.
|
||||
|
||||
self.scope_map.find(&id).map(|&x| *x)
|
||||
self.scope_map.find(&id).map_move(|x| *x)
|
||||
}
|
||||
|
||||
pub fn encl_scope(&self, id: ast::NodeId) -> ast::NodeId {
|
||||
@ -579,8 +579,7 @@ impl DetermineRpCtxt {
|
||||
/// the new variance is joined with the old variance.
|
||||
pub fn add_rp(&mut self, id: ast::NodeId, variance: region_variance) {
|
||||
assert!(id != 0);
|
||||
let old_variance = self.region_paramd_items.find(&id).
|
||||
map_consume(|x| *x);
|
||||
let old_variance = self.region_paramd_items.find(&id).map_move(|x| *x);
|
||||
let joined_variance = match old_variance {
|
||||
None => variance,
|
||||
Some(v) => join_variance(v, variance)
|
||||
|
@ -3358,7 +3358,7 @@ impl Resolver {
|
||||
// item, it's ok
|
||||
match def {
|
||||
def_ty_param(did, _)
|
||||
if self.def_map.find(&did.node).map_consume(|x| *x)
|
||||
if self.def_map.find(&did.node).map_move(|x| *x)
|
||||
== Some(def_typaram_binder(item_id)) => {
|
||||
// ok
|
||||
}
|
||||
|
@ -92,7 +92,7 @@ pub use middle::trans::context::task_llcx;
|
||||
static task_local_insn_key: local_data::Key<@~[&'static str]> = &local_data::Key;
|
||||
|
||||
pub fn with_insn_ctxt(blk: &fn(&[&'static str])) {
|
||||
let opt = local_data::get(task_local_insn_key, |k| k.map(|&k| *k));
|
||||
let opt = local_data::get(task_local_insn_key, |k| k.map_move(|k| *k));
|
||||
if opt.is_some() {
|
||||
blk(*opt.unwrap());
|
||||
}
|
||||
@ -108,7 +108,7 @@ pub struct _InsnCtxt { _x: () }
|
||||
impl Drop for _InsnCtxt {
|
||||
fn drop(&self) {
|
||||
do local_data::modify(task_local_insn_key) |c| {
|
||||
do c.map_consume |ctx| {
|
||||
do c.map_move |ctx| {
|
||||
let mut ctx = (*ctx).clone();
|
||||
ctx.pop();
|
||||
@ctx
|
||||
@ -120,7 +120,7 @@ impl Drop for _InsnCtxt {
|
||||
pub fn push_ctxt(s: &'static str) -> _InsnCtxt {
|
||||
debug!("new InsnCtxt: %s", s);
|
||||
do local_data::modify(task_local_insn_key) |c| {
|
||||
do c.map_consume |ctx| {
|
||||
do c.map_move |ctx| {
|
||||
let mut ctx = (*ctx).clone();
|
||||
ctx.push(s);
|
||||
@ctx
|
||||
|
@ -159,7 +159,7 @@ fn struct_ty(ty: Type,
|
||||
padding: Option<Type>,
|
||||
coerce: bool) -> Type {
|
||||
let size = ty_size(ty) * 8;
|
||||
let mut fields = padding.map_default(~[], |p| ~[*p]);
|
||||
let mut fields = padding.map_move_default(~[], |p| ~[p]);
|
||||
|
||||
if coerce {
|
||||
fields = vec::append(fields, coerce_to_int(size));
|
||||
|
@ -1010,8 +1010,7 @@ pub fn node_id_type_params(bcx: @mut Block, id: ast::NodeId) -> ~[ty::t] {
|
||||
pub fn node_vtables(bcx: @mut Block, id: ast::NodeId)
|
||||
-> Option<typeck::vtable_res> {
|
||||
let raw_vtables = bcx.ccx().maps.vtable_map.find(&id);
|
||||
raw_vtables.map(
|
||||
|&vts| resolve_vtables_in_fn_ctxt(bcx.fcx, *vts))
|
||||
raw_vtables.map_move(|vts| resolve_vtables_in_fn_ctxt(bcx.fcx, *vts))
|
||||
}
|
||||
|
||||
pub fn resolve_vtables_in_fn_ctxt(fcx: &FunctionContext, vts: typeck::vtable_res)
|
||||
|
@ -241,7 +241,7 @@ impl Drop for CrateContext {
|
||||
static task_local_llcx_key: local_data::Key<@ContextRef> = &local_data::Key;
|
||||
|
||||
pub fn task_llcx() -> ContextRef {
|
||||
let opt = local_data::get(task_local_llcx_key, |k| k.map(|&k| *k));
|
||||
let opt = local_data::get(task_local_llcx_key, |k| k.map_move(|k| *k));
|
||||
*opt.expect("task-local LLVMContextRef wasn't ever set!")
|
||||
}
|
||||
|
||||
|
@ -162,7 +162,7 @@ pub fn trans_method_callee(bcx: @mut Block,
|
||||
data: Method(MethodData {
|
||||
llfn: callee_fn.llfn,
|
||||
llself: val,
|
||||
temp_cleanup: temp_cleanups.head_opt().map(|&v| *v),
|
||||
temp_cleanup: temp_cleanups.head_opt().map_move(|v| *v),
|
||||
self_ty: node_id_type(bcx, this.id),
|
||||
self_mode: mentry.self_mode,
|
||||
})
|
||||
@ -339,7 +339,7 @@ pub fn trans_monomorphized_callee(bcx: @mut Block,
|
||||
data: Method(MethodData {
|
||||
llfn: llfn_val,
|
||||
llself: llself_val,
|
||||
temp_cleanup: temp_cleanups.head_opt().map(|&v| *v),
|
||||
temp_cleanup: temp_cleanups.head_opt().map_move(|v| *v),
|
||||
self_ty: node_id_type(bcx, base.id),
|
||||
self_mode: mentry.self_mode,
|
||||
})
|
||||
|
@ -3557,7 +3557,7 @@ pub fn def_has_ty_params(def: ast::def) -> bool {
|
||||
|
||||
pub fn provided_source(cx: ctxt, id: ast::def_id)
|
||||
-> Option<ast::def_id> {
|
||||
cx.provided_method_sources.find(&id).map(|x| **x)
|
||||
cx.provided_method_sources.find(&id).map_move(|x| *x)
|
||||
}
|
||||
|
||||
pub fn provided_trait_methods(cx: ctxt, id: ast::def_id) -> ~[@Method] {
|
||||
@ -3710,8 +3710,9 @@ fn struct_ctor_id(cx: ctxt, struct_did: ast::def_id) -> Option<ast::def_id> {
|
||||
Some(&ast_map::node_item(item, _)) => {
|
||||
match item.node {
|
||||
ast::item_struct(struct_def, _) => {
|
||||
struct_def.ctor_id.map(|ctor_id|
|
||||
ast_util::local_def(*ctor_id))
|
||||
do struct_def.ctor_id.map_move |ctor_id| {
|
||||
ast_util::local_def(ctor_id)
|
||||
}
|
||||
}
|
||||
_ => cx.sess.bug("called struct_ctor_id on non-struct")
|
||||
}
|
||||
@ -4443,15 +4444,15 @@ pub fn count_traits_and_supertraits(tcx: ctxt,
|
||||
}
|
||||
|
||||
pub fn get_tydesc_ty(tcx: ctxt) -> Result<t, ~str> {
|
||||
do tcx.lang_items.require(TyDescStructLangItem).map |tydesc_lang_item| {
|
||||
tcx.intrinsic_defs.find_copy(tydesc_lang_item)
|
||||
do tcx.lang_items.require(TyDescStructLangItem).map_move |tydesc_lang_item| {
|
||||
tcx.intrinsic_defs.find_copy(&tydesc_lang_item)
|
||||
.expect("Failed to resolve TyDesc")
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_opaque_ty(tcx: ctxt) -> Result<t, ~str> {
|
||||
do tcx.lang_items.require(OpaqueStructLangItem).map |opaque_lang_item| {
|
||||
tcx.intrinsic_defs.find_copy(opaque_lang_item)
|
||||
do tcx.lang_items.require(OpaqueStructLangItem).map_move |opaque_lang_item| {
|
||||
tcx.intrinsic_defs.find_copy(&opaque_lang_item)
|
||||
.expect("Failed to resolve Opaque")
|
||||
}
|
||||
}
|
||||
|
@ -621,9 +621,9 @@ fn ty_of_method_or_bare_fn<AC:AstConv,RS:region_scope + Clone + 'static>(
|
||||
in_binding_rscope(rscope,
|
||||
RegionParamNames(bound_lifetime_names.clone()));
|
||||
|
||||
let opt_transformed_self_ty = opt_self_info.map(|&self_info| {
|
||||
let opt_transformed_self_ty = do opt_self_info.map_move |self_info| {
|
||||
transform_self_ty(this, &rb, self_info)
|
||||
});
|
||||
};
|
||||
|
||||
let input_tys = decl.inputs.map(|a| ty_of_arg(this, &rb, a, None));
|
||||
|
||||
|
@ -158,9 +158,9 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: @ast::pat, path: &ast::Path,
|
||||
None => {
|
||||
fcx.infcx().type_error_message_str_with_expected(pat.span,
|
||||
|expected, actual| {
|
||||
expected.map_default(~"", |e| {
|
||||
expected.map_move_default(~"", |e| {
|
||||
fmt!("mismatched types: expected `%s` but found %s",
|
||||
*e, actual)})},
|
||||
e, actual)})},
|
||||
Some(expected), ~"a structure pattern",
|
||||
None);
|
||||
fcx.write_error(pat.id);
|
||||
@ -201,9 +201,9 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: @ast::pat, path: &ast::Path,
|
||||
_ => {
|
||||
fcx.infcx().type_error_message_str_with_expected(pat.span,
|
||||
|expected, actual| {
|
||||
expected.map_default(~"", |e| {
|
||||
expected.map_move_default(~"", |e| {
|
||||
fmt!("mismatched types: expected `%s` but found %s",
|
||||
*e, actual)})},
|
||||
e, actual)})},
|
||||
Some(expected), ~"an enum or structure pattern",
|
||||
None);
|
||||
fcx.write_error(pat.id);
|
||||
@ -535,9 +535,9 @@ pub fn check_pat(pcx: &pat_ctxt, pat: @ast::pat, expected: ty::t) {
|
||||
_ => ty::terr_mismatch
|
||||
};
|
||||
fcx.infcx().type_error_message_str_with_expected(pat.span, |expected, actual| {
|
||||
expected.map_default(~"", |e| {
|
||||
expected.map_move_default(~"", |e| {
|
||||
fmt!("mismatched types: expected `%s` but found %s",
|
||||
*e, actual)})}, Some(expected), ~"tuple", Some(&type_error));
|
||||
e, actual)})}, Some(expected), ~"tuple", Some(&type_error));
|
||||
fcx.write_error(pat.id);
|
||||
}
|
||||
}
|
||||
@ -584,9 +584,9 @@ pub fn check_pat(pcx: &pat_ctxt, pat: @ast::pat, expected: ty::t) {
|
||||
fcx.infcx().type_error_message_str_with_expected(
|
||||
pat.span,
|
||||
|expected, actual| {
|
||||
expected.map_default(~"", |e| {
|
||||
expected.map_move_default(~"", |e| {
|
||||
fmt!("mismatched types: expected `%s` but found %s",
|
||||
*e, actual)})},
|
||||
e, actual)})},
|
||||
Some(expected),
|
||||
~"a vector pattern",
|
||||
None);
|
||||
@ -642,9 +642,9 @@ pub fn check_pointer_pat(pcx: &pat_ctxt,
|
||||
fcx.infcx().type_error_message_str_with_expected(
|
||||
span,
|
||||
|expected, actual| {
|
||||
expected.map_default(~"", |e| {
|
||||
expected.map_move_default(~"", |e| {
|
||||
fmt!("mismatched types: expected `%s` but found %s",
|
||||
*e, actual)})},
|
||||
e, actual)})},
|
||||
Some(expected),
|
||||
fmt!("%s pattern", match pointer_kind {
|
||||
Managed => "an @-box",
|
||||
|
@ -364,8 +364,8 @@ pub fn check_fn(ccx: @mut CrateCtxt,
|
||||
|br| ty::re_free(ty::FreeRegion {scope_id: body.id,
|
||||
bound_region: br}));
|
||||
let opt_self_info =
|
||||
opt_self_info.map(
|
||||
|si| SelfInfo {self_ty: opt_self_ty.unwrap(), ..*si});
|
||||
opt_self_info.map_move(
|
||||
|si| SelfInfo {self_ty: opt_self_ty.unwrap(), .. si});
|
||||
(isr, opt_self_info, fn_sig)
|
||||
};
|
||||
|
||||
@ -536,7 +536,7 @@ pub fn check_method(ccx: @mut CrateCtxt,
|
||||
{
|
||||
let method_def_id = local_def(method.id);
|
||||
let method_ty = ty::method(ccx.tcx, method_def_id);
|
||||
let opt_self_info = method_ty.transformed_self_ty.map(|&ty| {
|
||||
let opt_self_info = method_ty.transformed_self_ty.map_move(|ty| {
|
||||
SelfInfo {self_ty: ty,
|
||||
self_id: method.self_id,
|
||||
span: method.explicit_self.span}
|
||||
@ -557,7 +557,7 @@ pub fn check_no_duplicate_fields(tcx: ty::ctxt,
|
||||
|
||||
for p in fields.iter() {
|
||||
let (id, sp) = *p;
|
||||
let orig_sp = field_names.find(&id).map_consume(|x| *x);
|
||||
let orig_sp = field_names.find(&id).map_move(|x| *x);
|
||||
match orig_sp {
|
||||
Some(orig_sp) => {
|
||||
tcx.sess.span_err(sp, fmt!("Duplicate field name %s in record type declaration",
|
||||
@ -601,7 +601,7 @@ pub fn check_item(ccx: @mut CrateCtxt, it: @ast::item) {
|
||||
check_bare_fn(ccx, decl, body, it.id, None);
|
||||
}
|
||||
ast::item_impl(_, _, _, ref ms) => {
|
||||
let rp = ccx.tcx.region_paramd_items.find(&it.id).map_consume(|x| *x);
|
||||
let rp = ccx.tcx.region_paramd_items.find(&it.id).map_move(|x| *x);
|
||||
debug!("item_impl %s with id %d rp %?",
|
||||
ccx.tcx.sess.str_of(it.ident), it.id, rp);
|
||||
for m in ms.iter() {
|
||||
@ -1877,8 +1877,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
|
||||
for field in ast_fields.iter() {
|
||||
let mut expected_field_type = ty::mk_err();
|
||||
|
||||
let pair = class_field_map.find(&field.ident).
|
||||
map_consume(|x| *x);
|
||||
let pair = class_field_map.find(&field.ident).map_move(|x| *x);
|
||||
match pair {
|
||||
None => {
|
||||
tcx.sess.span_err(
|
||||
@ -1962,7 +1961,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
|
||||
if class_id.crate == ast::LOCAL_CRATE {
|
||||
region_parameterized =
|
||||
tcx.region_paramd_items.find(&class_id.node).
|
||||
map_consume(|x| *x);
|
||||
map_move(|x| *x);
|
||||
match tcx.items.find(&class_id.node) {
|
||||
Some(&ast_map::node_item(@ast::item {
|
||||
node: ast::item_struct(_, ref generics),
|
||||
@ -2050,7 +2049,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
|
||||
let raw_type;
|
||||
if enum_id.crate == ast::LOCAL_CRATE {
|
||||
region_parameterized =
|
||||
tcx.region_paramd_items.find(&enum_id.node).map_consume(|x| *x);
|
||||
tcx.region_paramd_items.find(&enum_id.node).map_move(|x| *x);
|
||||
match tcx.items.find(&enum_id.node) {
|
||||
Some(&ast_map::node_item(@ast::item {
|
||||
node: ast::item_enum(_, ref generics),
|
||||
|
@ -40,9 +40,9 @@ pub fn replace_bound_regions_in_fn_sig(
|
||||
|
||||
debug!("replace_bound_regions_in_fn_sig(self_ty=%?, fn_sig=%s, \
|
||||
all_tys=%?)",
|
||||
opt_self_ty.map(|&t| ppaux::ty_to_str(tcx, t)),
|
||||
opt_self_ty.map(|t| ppaux::ty_to_str(tcx, *t)),
|
||||
ppaux::fn_sig_to_str(tcx, fn_sig),
|
||||
all_tys.map(|&t| ppaux::ty_to_str(tcx, t)));
|
||||
all_tys.map(|t| ppaux::ty_to_str(tcx, *t)));
|
||||
let _i = indenter();
|
||||
|
||||
let isr = do create_bound_region_mapping(tcx, isr, all_tys) |br| {
|
||||
@ -52,12 +52,12 @@ pub fn replace_bound_regions_in_fn_sig(
|
||||
let new_fn_sig = ty::fold_sig(fn_sig, |t| {
|
||||
replace_bound_regions(tcx, isr, t)
|
||||
});
|
||||
let new_self_ty = opt_self_ty.map(|&t| replace_bound_regions(tcx, isr, t));
|
||||
let new_self_ty = opt_self_ty.map(|t| replace_bound_regions(tcx, isr, *t));
|
||||
|
||||
debug!("result of replace_bound_regions_in_fn_sig: \
|
||||
new_self_ty=%?, \
|
||||
fn_sig=%s",
|
||||
new_self_ty.map(|&t| ppaux::ty_to_str(tcx, t)),
|
||||
new_self_ty.map(|t| ppaux::ty_to_str(tcx, *t)),
|
||||
ppaux::fn_sig_to_str(tcx, &new_fn_sig));
|
||||
|
||||
return (isr, new_self_ty, new_fn_sig);
|
||||
|
@ -131,9 +131,7 @@ fn lookup_vtables_for_param(vcx: &VtableContext,
|
||||
// ty is the value supplied for the type parameter A...
|
||||
let mut param_result = ~[];
|
||||
|
||||
do ty::each_bound_trait_and_supertraits(
|
||||
tcx, type_param_bounds.trait_bounds) |trait_ref|
|
||||
{
|
||||
do ty::each_bound_trait_and_supertraits(tcx, type_param_bounds.trait_bounds) |trait_ref| {
|
||||
// ...and here trait_ref is each bound that was declared on A,
|
||||
// expressed in terms of the type parameters.
|
||||
|
||||
|
@ -198,7 +198,7 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt,
|
||||
trait_id: ast::NodeId)
|
||||
{
|
||||
let tcx = ccx.tcx;
|
||||
let region_paramd = tcx.region_paramd_items.find(&trait_id).map(|&x| *x);
|
||||
let region_paramd = tcx.region_paramd_items.find(&trait_id).map_move(|x| *x);
|
||||
match tcx.items.get_copy(&trait_id) {
|
||||
ast_map::node_item(@ast::item {
|
||||
node: ast::item_trait(ref generics, _, ref ms),
|
||||
@ -817,7 +817,7 @@ pub fn ensure_no_ty_param_bounds(ccx: &CrateCtxt,
|
||||
|
||||
pub fn convert(ccx: &CrateCtxt, it: &ast::item) {
|
||||
let tcx = ccx.tcx;
|
||||
let rp = tcx.region_paramd_items.find(&it.id).map_consume(|x| *x);
|
||||
let rp = tcx.region_paramd_items.find(&it.id).map_move(|x| *x);
|
||||
debug!("convert: item %s with id %d rp %?",
|
||||
tcx.sess.str_of(it.ident), it.id, rp);
|
||||
match it.node {
|
||||
@ -1020,7 +1020,7 @@ pub fn trait_def_of_item(ccx: &CrateCtxt, it: &ast::item) -> @ty::TraitDef {
|
||||
Some(&def) => return def,
|
||||
_ => {}
|
||||
}
|
||||
let rp = tcx.region_paramd_items.find(&it.id).map_consume(|x| *x);
|
||||
let rp = tcx.region_paramd_items.find(&it.id).map_move(|x| *x);
|
||||
match it.node {
|
||||
ast::item_trait(ref generics, _, _) => {
|
||||
let self_ty = ty::mk_self(tcx, def_id);
|
||||
@ -1049,7 +1049,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::item)
|
||||
Some(&tpt) => return tpt,
|
||||
_ => {}
|
||||
}
|
||||
let rp = tcx.region_paramd_items.find(&it.id).map_consume(|x| *x);
|
||||
let rp = tcx.region_paramd_items.find(&it.id).map_move(|x| *x);
|
||||
match it.node {
|
||||
ast::item_static(ref t, _, _) => {
|
||||
let typ = ccx.to_ty(&empty_rscope, t);
|
||||
@ -1086,7 +1086,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::item)
|
||||
None => { }
|
||||
}
|
||||
|
||||
let rp = tcx.region_paramd_items.find(&it.id).map_consume(|x| *x);
|
||||
let rp = tcx.region_paramd_items.find(&it.id).map_move(|x| *x);
|
||||
let region_parameterization =
|
||||
RegionParameterization::from_variance_and_generics(rp, generics);
|
||||
let tpt = {
|
||||
|
@ -716,12 +716,13 @@ impl InferCtxt {
|
||||
err: Option<&ty::type_err>) {
|
||||
debug!("hi! expected_ty = %?, actual_ty = %s", expected_ty, actual_ty);
|
||||
|
||||
let error_str = err.map_default(~"", |t_err|
|
||||
fmt!(" (%s)",
|
||||
ty::type_err_to_str(self.tcx, *t_err)));
|
||||
let resolved_expected = expected_ty.map(|&e_ty|
|
||||
{ self.resolve_type_vars_if_possible(e_ty) });
|
||||
if !resolved_expected.map_default(false, |&e| { ty::type_is_error(e) }) {
|
||||
let error_str = do err.map_move_default(~"") |t_err| {
|
||||
fmt!(" (%s)", ty::type_err_to_str(self.tcx, t_err))
|
||||
};
|
||||
let resolved_expected = do expected_ty.map_move |e_ty| {
|
||||
self.resolve_type_vars_if_possible(e_ty)
|
||||
};
|
||||
if !resolved_expected.map_move_default(false, |e| { ty::type_is_error(e) }) {
|
||||
match resolved_expected {
|
||||
None => self.tcx.sess.span_err(sp,
|
||||
fmt!("%s%s", mk_msg(None, actual_ty), error_str)),
|
||||
|
@ -249,13 +249,12 @@ pub fn run_compiler(args: &~[~str], demitter: diagnostic::Emitter) {
|
||||
|
||||
let sopts = build_session_options(binary, matches, demitter);
|
||||
let sess = build_session(sopts, demitter);
|
||||
let odir = getopts::opt_maybe_str(matches, "out-dir");
|
||||
let odir = odir.map(|o| Path(*o));
|
||||
let ofile = getopts::opt_maybe_str(matches, "o");
|
||||
let ofile = ofile.map(|o| Path(*o));
|
||||
let odir = getopts::opt_maybe_str(matches, "out-dir").map_move(|o| Path(o));
|
||||
let ofile = getopts::opt_maybe_str(matches, "o").map_move(|o| Path(o));
|
||||
let cfg = build_configuration(sess, binary, &input);
|
||||
let pretty = getopts::opt_default(matches, "pretty", "normal").map(
|
||||
|a| parse_pretty(sess, *a));
|
||||
let pretty = do getopts::opt_default(matches, "pretty", "normal").map_move |a| {
|
||||
parse_pretty(sess, a)
|
||||
};
|
||||
match pretty {
|
||||
Some::<pp_mode>(ppm) => {
|
||||
pretty_print_input(sess, cfg, &input, ppm);
|
||||
|
@ -140,7 +140,7 @@ fn config_from_opts(
|
||||
let result = result::Ok(config);
|
||||
let result = do result.chain |config| {
|
||||
let output_dir = getopts::opt_maybe_str(matches, opt_output_dir());
|
||||
let output_dir = output_dir.map(|s| Path(*s));
|
||||
let output_dir = output_dir.map_move(|s| Path(s));
|
||||
result::Ok(Config {
|
||||
output_dir: output_dir.unwrap_or_default(config.output_dir.clone()),
|
||||
.. config
|
||||
@ -148,8 +148,8 @@ fn config_from_opts(
|
||||
};
|
||||
let result = do result.chain |config| {
|
||||
let output_format = getopts::opt_maybe_str(matches, opt_output_format());
|
||||
do output_format.map_default(result::Ok(config.clone())) |output_format| {
|
||||
do parse_output_format(*output_format).chain |output_format| {
|
||||
do output_format.map_move_default(result::Ok(config.clone())) |output_format| {
|
||||
do parse_output_format(output_format).chain |output_format| {
|
||||
result::Ok(Config {
|
||||
output_format: output_format,
|
||||
.. config.clone()
|
||||
@ -160,8 +160,8 @@ fn config_from_opts(
|
||||
let result = do result.chain |config| {
|
||||
let output_style =
|
||||
getopts::opt_maybe_str(matches, opt_output_style());
|
||||
do output_style.map_default(result::Ok(config.clone())) |output_style| {
|
||||
do parse_output_style(*output_style).chain |output_style| {
|
||||
do output_style.map_move_default(result::Ok(config.clone())) |output_style| {
|
||||
do parse_output_style(output_style).chain |output_style| {
|
||||
result::Ok(Config {
|
||||
output_style: output_style,
|
||||
.. config.clone()
|
||||
|
@ -260,9 +260,9 @@ fn fold_impl(
|
||||
}, _) => {
|
||||
let bounds = pprust::generics_to_str(generics, extract::interner());
|
||||
let bounds = if bounds.is_empty() { None } else { Some(bounds) };
|
||||
let trait_types = opt_trait_type.map_default(~[], |p| {
|
||||
let trait_types = do opt_trait_type.map_default(~[]) |p| {
|
||||
~[pprust::path_to_str(&p.path, extract::interner())]
|
||||
});
|
||||
};
|
||||
(bounds,
|
||||
trait_types,
|
||||
Some(pprust::ty_to_str(
|
||||
|
@ -203,7 +203,7 @@ fn run(mut program: ~Program, binary: ~str, lib_search_paths: ~[~str],
|
||||
}
|
||||
}
|
||||
}
|
||||
result = do blk.expr.map_consume |e| {
|
||||
result = do blk.expr.map_move |e| {
|
||||
do with_pp(intr) |pp, _| { pprust::print_expr(pp, e); }
|
||||
};
|
||||
}
|
||||
|
@ -238,7 +238,7 @@ impl<K:Hash + Eq,V> HashMap<K, V> {
|
||||
let len_buckets = self.buckets.len();
|
||||
let bucket = self.buckets[idx].take();
|
||||
|
||||
let value = do bucket.map_consume |bucket| {
|
||||
let value = do bucket.map_move |bucket| {
|
||||
bucket.value
|
||||
};
|
||||
|
||||
@ -479,7 +479,7 @@ impl<K: Hash + Eq, V> HashMap<K, V> {
|
||||
impl<K: Hash + Eq, V: Clone> HashMap<K, V> {
|
||||
/// Like `find`, but returns a copy of the value.
|
||||
pub fn find_copy(&self, k: &K) -> Option<V> {
|
||||
self.find(k).map_consume(|v| (*v).clone())
|
||||
self.find(k).map_move(|v| (*v).clone())
|
||||
}
|
||||
|
||||
/// Like `get`, but returns a copy of the value.
|
||||
|
@ -674,7 +674,7 @@ impl<A, T: Iterator<A>> IteratorUtil<A> for T {
|
||||
Some((y, y_val))
|
||||
}
|
||||
}
|
||||
}).map_consume(|(x, _)| x)
|
||||
}).map_move(|(x, _)| x)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
@ -689,7 +689,7 @@ impl<A, T: Iterator<A>> IteratorUtil<A> for T {
|
||||
Some((y, y_val))
|
||||
}
|
||||
}
|
||||
}).map_consume(|(x, _)| x)
|
||||
}).map_move(|(x, _)| x)
|
||||
}
|
||||
}
|
||||
|
||||
@ -1382,7 +1382,7 @@ impl<'self, A, T: Iterator<A>, B, U: Iterator<B>> Iterator<B> for
|
||||
return Some(x)
|
||||
}
|
||||
}
|
||||
match self.iter.next().map_consume(|x| (self.f)(x)) {
|
||||
match self.iter.next().map_move(|x| (self.f)(x)) {
|
||||
None => return self.backiter.chain_mut_ref(|it| it.next()),
|
||||
next => self.frontiter = next,
|
||||
}
|
||||
@ -1414,7 +1414,7 @@ impl<'self,
|
||||
y => return y
|
||||
}
|
||||
}
|
||||
match self.iter.next_back().map_consume(|x| (self.f)(x)) {
|
||||
match self.iter.next_back().map_move(|x| (self.f)(x)) {
|
||||
None => return self.frontiter.chain_mut_ref(|it| it.next_back()),
|
||||
next => self.backiter = next,
|
||||
}
|
||||
|
@ -110,16 +110,16 @@ fn test_tls_multitask() {
|
||||
set(my_key, @~"parent data");
|
||||
do task::spawn {
|
||||
// TLS shouldn't carry over.
|
||||
assert!(get(my_key, |k| k.map(|&k| *k)).is_none());
|
||||
assert!(get(my_key, |k| k.map_move(|k| *k)).is_none());
|
||||
set(my_key, @~"child data");
|
||||
assert!(*(get(my_key, |k| k.map(|&k| *k)).unwrap()) ==
|
||||
assert!(*(get(my_key, |k| k.map_move(|k| *k)).unwrap()) ==
|
||||
~"child data");
|
||||
// should be cleaned up for us
|
||||
}
|
||||
// Must work multiple times
|
||||
assert!(*(get(my_key, |k| k.map(|&k| *k)).unwrap()) == ~"parent data");
|
||||
assert!(*(get(my_key, |k| k.map(|&k| *k)).unwrap()) == ~"parent data");
|
||||
assert!(*(get(my_key, |k| k.map(|&k| *k)).unwrap()) == ~"parent data");
|
||||
assert!(*(get(my_key, |k| k.map_move(|k| *k)).unwrap()) == ~"parent data");
|
||||
assert!(*(get(my_key, |k| k.map_move(|k| *k)).unwrap()) == ~"parent data");
|
||||
assert!(*(get(my_key, |k| k.map_move(|k| *k)).unwrap()) == ~"parent data");
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -127,7 +127,7 @@ fn test_tls_overwrite() {
|
||||
static my_key: Key<@~str> = &Key;
|
||||
set(my_key, @~"first data");
|
||||
set(my_key, @~"next data"); // Shouldn't leak.
|
||||
assert!(*(get(my_key, |k| k.map(|&k| *k)).unwrap()) == ~"next data");
|
||||
assert!(*(get(my_key, |k| k.map_move(|k| *k)).unwrap()) == ~"next data");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@@ -208,6 +208,12 @@ impl<T> Option<T> {
         match *self { Some(ref mut x) => Some(f(x)), None => None }
     }

+    /// Applies a function to the contained value or returns a default
+    #[inline]
+    pub fn map_default<'a, U>(&'a self, def: U, f: &fn(&'a T) -> U) -> U {
+        match *self { None => def, Some(ref t) => f(t) }
+    }
+
     /// Maps a `Some` value from one type to another by a mutable reference,
     /// or returns a default value.
     #[inline]
@@ -218,21 +224,15 @@ impl<T> Option<T> {
     /// As `map`, but consumes the option and gives `f` ownership to avoid
     /// copying.
     #[inline]
-    pub fn map_consume<U>(self, f: &fn(v: T) -> U) -> Option<U> {
-        match self { None => None, Some(v) => Some(f(v)) }
-    }
-
-    /// Applies a function to the contained value or returns a default
-    #[inline]
-    pub fn map_default<'a, U>(&'a self, def: U, f: &fn(&'a T) -> U) -> U {
-        match *self { None => def, Some(ref t) => f(t) }
+    pub fn map_move<U>(self, f: &fn(T) -> U) -> Option<U> {
+        match self { Some(x) => Some(f(x)), None => None }
     }

     /// As `map_default`, but consumes the option and gives `f`
     /// ownership to avoid copying.
     #[inline]
-    pub fn map_consume_default<U>(self, def: U, f: &fn(v: T) -> U) -> U {
-        match self { None => def, Some(v) => f(v) }
+    pub fn map_move_default<U>(self, def: U, f: &fn(T) -> U) -> U {
+        match self { None => def, Some(t) => f(t) }
     }

     /// Take the value out of the option, leaving a `None` in its place.
@@ -241,20 +241,6 @@ impl<T> Option<T> {
         util::replace(self, None)
     }

-    /// As `map_consume`, but swaps a None into the original option rather
-    /// than consuming it by-value.
-    #[inline]
-    pub fn take_map<U>(&mut self, blk: &fn(T) -> U) -> Option<U> {
-        self.take().map_consume(blk)
-    }
-
-    /// As `map_consume_default`, but swaps a None into the original option
-    /// rather than consuming it by-value.
-    #[inline]
-    pub fn take_map_default<U> (&mut self, def: U, blk: &fn(T) -> U) -> U {
-        self.take().map_consume_default(def, blk)
-    }
-
     /// Apply a function to the contained value or do nothing.
     /// Returns true if the contained value was mutated.
     pub fn mutate(&mut self, f: &fn(T) -> T) -> bool {
@ -498,9 +498,7 @@ pub fn self_exe_path() -> Option<Path> {
|
||||
}
|
||||
}
|
||||
|
||||
do load_self().map |pth| {
|
||||
Path(*pth).dir_path()
|
||||
}
|
||||
load_self().map_move(|path| Path(path).dir_path())
|
||||
}
|
||||
|
||||
|
||||
|
@@ -149,6 +149,40 @@ impl<T, E: ToStr> Result<T, E> {
         }
     }

+    /// Call a method based on a previous result
+    ///
+    /// If `self` is `Ok` then the value is extracted and passed to `op`
+    /// whereupon `op`s result is wrapped in `Ok` and returned. if `self` is
+    /// `Err` then it is immediately returned. This function can be used to
+    /// compose the results of two functions.
+    ///
+    /// Example:
+    ///
+    ///     let res = do read_file(file).map_move |buf| {
+    ///         parse_bytes(buf)
+    ///     }
+    #[inline]
+    pub fn map_move<U>(self, op: &fn(T) -> U) -> Result<U,E> {
+        match self {
+            Ok(t) => Ok(op(t)),
+            Err(e) => Err(e)
+        }
+    }
+
+    /// Call a method based on a previous result
+    ///
+    /// If `self` is `Err` then the value is extracted and passed to `op`
+    /// whereupon `op`s result is wrapped in an `Err` and returned. if `self` is
+    /// `Ok` then it is immediately returned. This function can be used to pass
+    /// through a successful result while handling an error.
+    #[inline]
+    pub fn map_err_move<F>(self, op: &fn(E) -> F) -> Result<T,F> {
+        match self {
+            Ok(t) => Ok(t),
+            Err(e) => Err(op(e))
+        }
+    }
+
     /// Call a method based on a previous result
     ///
     /// If `self` is `Ok` then the value is extracted and passed to `op`
@@ -312,7 +346,9 @@ pub fn iter_vec2<S, T, U: ToStr>(ss: &[S], ts: &[T],
 #[cfg(test)]
 mod tests {
     use super::*;

     use either;
+    use str::OwnedStr;

     pub fn op1() -> Result<int, ~str> { Ok(666) }

@@ -359,14 +395,26 @@ mod tests {

     #[test]
     pub fn test_impl_map() {
-        assert_eq!(Ok::<~str, ~str>(~"a").map(|_x| ~"b"), Ok(~"b"));
-        assert_eq!(Err::<~str, ~str>(~"a").map(|_x| ~"b"), Err(~"a"));
+        assert_eq!(Ok::<~str, ~str>(~"a").map(|x| (~"b").append(*x)), Ok(~"ba"));
+        assert_eq!(Err::<~str, ~str>(~"a").map(|x| (~"b").append(*x)), Err(~"a"));
     }

     #[test]
     pub fn test_impl_map_err() {
-        assert_eq!(Ok::<~str, ~str>(~"a").map_err(|_x| ~"b"), Ok(~"a"));
-        assert_eq!(Err::<~str, ~str>(~"a").map_err(|_x| ~"b"), Err(~"b"));
+        assert_eq!(Ok::<~str, ~str>(~"a").map_err(|x| (~"b").append(*x)), Ok(~"a"));
+        assert_eq!(Err::<~str, ~str>(~"a").map_err(|x| (~"b").append(*x)), Err(~"ba"));
     }

+    #[test]
+    pub fn test_impl_map_move() {
+        assert_eq!(Ok::<~str, ~str>(~"a").map_move(|x| x + "b"), Ok(~"ab"));
+        assert_eq!(Err::<~str, ~str>(~"a").map_move(|x| x + "b"), Err(~"a"));
+    }
+
+    #[test]
+    pub fn test_impl_map_err_move() {
+        assert_eq!(Ok::<~str, ~str>(~"a").map_err_move(|x| x + "b"), Ok(~"a"));
+        assert_eq!(Err::<~str, ~str>(~"a").map_err_move(|x| x + "b"), Err(~"ab"));
+    }
+
     #[test]
@ -159,7 +159,7 @@ impl<T> ChanOne<T> {
|
||||
// Port is blocked. Wake it up.
|
||||
let recvr = BlockedTask::cast_from_uint(task_as_state);
|
||||
if do_resched {
|
||||
do recvr.wake().map_consume |woken_task| {
|
||||
do recvr.wake().map_move |woken_task| {
|
||||
Scheduler::run_task(woken_task);
|
||||
};
|
||||
} else {
|
||||
@ -381,7 +381,7 @@ impl<T> Drop for ChanOne<T> {
|
||||
// The port is blocked waiting for a message we will never send. Wake it.
|
||||
assert!((*this.packet()).payload.is_none());
|
||||
let recvr = BlockedTask::cast_from_uint(task_as_state);
|
||||
do recvr.wake().map_consume |woken_task| {
|
||||
do recvr.wake().map_move |woken_task| {
|
||||
Scheduler::run_task(woken_task);
|
||||
};
|
||||
}
|
||||
|
@ -402,10 +402,10 @@ impl KillHandle {
|
||||
|| {
|
||||
// Prefer to check tombstones that were there first,
|
||||
// being "more fair" at the expense of tail-recursion.
|
||||
others.take().map_consume_default(true, |f| f()) && {
|
||||
others.take().map_move_default(true, |f| f()) && {
|
||||
let mut inner = this.take().unwrap();
|
||||
(!inner.any_child_failed) &&
|
||||
inner.child_tombstones.take_map_default(true, |f| f())
|
||||
inner.child_tombstones.take().map_move_default(true, |f| f())
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -424,7 +424,7 @@ impl KillHandle {
|
||||
let others = Cell::new(other_tombstones); // :(
|
||||
|| {
|
||||
// Prefer fairness to tail-recursion, as in above case.
|
||||
others.take().map_consume_default(true, |f| f()) &&
|
||||
others.take().map_move_default(true, |f| f()) &&
|
||||
f.take()()
|
||||
}
|
||||
}
|
||||
@ -493,7 +493,7 @@ impl Death {
|
||||
{ use util; util::ignore(group); }
|
||||
|
||||
// Step 1. Decide if we need to collect child failures synchronously.
|
||||
do self.on_exit.take_map |on_exit| {
|
||||
do self.on_exit.take().map_move |on_exit| {
|
||||
if success {
|
||||
// We succeeded, but our children might not. Need to wait for them.
|
||||
let mut inner = self.kill_handle.take_unwrap().unwrap();
|
||||
@ -501,7 +501,7 @@ impl Death {
|
||||
success = false;
|
||||
} else {
|
||||
// Lockless access to tombstones protected by unwrap barrier.
|
||||
success = inner.child_tombstones.take_map_default(true, |f| f());
|
||||
success = inner.child_tombstones.take().map_move_default(true, |f| f());
|
||||
}
|
||||
}
|
||||
on_exit(success);
|
||||
@ -510,12 +510,12 @@ impl Death {
|
||||
// Step 2. Possibly alert possibly-watching parent to failure status.
|
||||
// Note that as soon as parent_handle goes out of scope, the parent
|
||||
// can successfully unwrap its handle and collect our reported status.
|
||||
do self.watching_parent.take_map |mut parent_handle| {
|
||||
do self.watching_parent.take().map_move |mut parent_handle| {
|
||||
if success {
|
||||
// Our handle might be None if we had an exit callback, and
|
||||
// already unwrapped it. But 'success' being true means no
|
||||
// child failed, so there's nothing to do (see below case).
|
||||
do self.kill_handle.take_map |own_handle| {
|
||||
do self.kill_handle.take().map_move |own_handle| {
|
||||
own_handle.reparent_children_to(&mut parent_handle);
|
||||
};
|
||||
} else {
|
||||
|
@ -325,7 +325,7 @@ impl Scheduler {
|
||||
/// As enqueue_task, but with the possibility for the blocked task to
|
||||
/// already have been killed.
|
||||
pub fn enqueue_blocked_task(&mut self, blocked_task: BlockedTask) {
|
||||
do blocked_task.wake().map_consume |task| {
|
||||
do blocked_task.wake().map_move |task| {
|
||||
self.enqueue_task(task);
|
||||
};
|
||||
}
|
||||
@ -533,7 +533,7 @@ impl Scheduler {
|
||||
sched.enqueue_blocked_task(last_task);
|
||||
}
|
||||
};
|
||||
opt.map_consume(Local::put);
|
||||
opt.map_move(Local::put);
|
||||
}
|
||||
|
||||
// The primary function for changing contexts. In the current
|
||||
|
@ -465,10 +465,10 @@ mod test {
|
||||
do run_in_newsched_task() {
|
||||
static key: local_data::Key<@~str> = &local_data::Key;
|
||||
local_data::set(key, @~"data");
|
||||
assert!(*local_data::get(key, |k| k.map(|&k| *k)).unwrap() == ~"data");
|
||||
assert!(*local_data::get(key, |k| k.map_move(|k| *k)).unwrap() == ~"data");
|
||||
static key2: local_data::Key<@~str> = &local_data::Key;
|
||||
local_data::set(key2, @~"data");
|
||||
assert!(*local_data::get(key2, |k| k.map(|&k| *k)).unwrap() == ~"data");
|
||||
assert!(*local_data::get(key2, |k| k.map_move(|k| *k)).unwrap() == ~"data");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1849,7 +1849,7 @@ impl<'self> StrSlice<'self> for &'self str {
|
||||
} else {
|
||||
self.matches_index_iter(needle)
|
||||
.next()
|
||||
.map_consume(|(start, _end)| start)
|
||||
.map_move(|(start, _end)| start)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -500,7 +500,7 @@ impl RuntimeGlue {
|
||||
OldTask(ptr) => rt::rust_task_kill_other(ptr),
|
||||
NewTask(handle) => {
|
||||
let mut handle = handle;
|
||||
do handle.kill().map_consume |killed_task| {
|
||||
do handle.kill().map_move |killed_task| {
|
||||
let killed_task = Cell::new(killed_task);
|
||||
do Local::borrow::<Scheduler, ()> |sched| {
|
||||
sched.enqueue_task(killed_task.take());
|
||||
@ -682,7 +682,7 @@ fn spawn_raw_newsched(mut opts: TaskOpts, f: ~fn()) {
|
||||
// Child task runs this code.
|
||||
|
||||
// If child data is 'None', the enlist is vacuously successful.
|
||||
let enlist_success = do child_data.take().map_consume_default(true) |child_data| {
|
||||
let enlist_success = do child_data.take().map_move_default(true) |child_data| {
|
||||
let child_data = Cell::new(child_data); // :(
|
||||
do Local::borrow::<Task, bool> |me| {
|
||||
let (child_tg, ancestors, is_main) = child_data.take();
|
||||
@ -854,7 +854,7 @@ fn spawn_raw_oldsched(mut opts: TaskOpts, f: ~fn()) {
|
||||
// Even if the below code fails to kick the child off, we must
|
||||
// send Something on the notify channel.
|
||||
|
||||
let notifier = notify_chan.map_consume(|c| AutoNotify(c));
|
||||
let notifier = notify_chan.map_move(|c| AutoNotify(c));
|
||||
|
||||
if enlist_many(OldTask(child), &child_arc, &mut ancestors) {
|
||||
let group = @@mut Taskgroup(child_arc, ancestors, is_main, notifier);
|
||||
|
@ -888,7 +888,7 @@ pub fn new_sctable_internal() -> SCTable {
|
||||
// fetch the SCTable from TLS, create one if it doesn't yet exist.
|
||||
pub fn get_sctable() -> @mut SCTable {
|
||||
static sctable_key: local_data::Key<@@mut SCTable> = &local_data::Key;
|
||||
match local_data::get(sctable_key, |k| k.map(|&k| *k)) {
|
||||
match local_data::get(sctable_key, |k| k.map_move(|k| *k)) {
|
||||
None => {
|
||||
let new_table = @@mut new_sctable_internal();
|
||||
local_data::set(sctable_key,new_table);
|
||||
|
@ -83,7 +83,7 @@ impl AttrMetaMethods for MetaItem {
|
||||
}
|
||||
|
||||
pub fn name_str_pair(&self) -> Option<(@str, @str)> {
|
||||
self.value_str().map_consume(|s| (self.name(), s))
|
||||
self.value_str().map_move(|s| (self.name(), s))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -192,7 +192,7 @@ fn print_maybe_styled(msg: &str, color: term::attr::Attr) {
|
||||
let stderr = io::stderr();
|
||||
|
||||
if stderr.get_type() == io::Screen {
|
||||
let t = match local_data::get(tls_terminal, |v| v.map_consume(|&k|k)) {
|
||||
let t = match local_data::get(tls_terminal, |v| v.map_move(|k| *k)) {
|
||||
None => {
|
||||
let t = term::Terminal::new(stderr);
|
||||
let tls = @match t {
|
||||
|
@ -479,7 +479,7 @@ impl <K: Eq + Hash + IterBytes + 'static, V: 'static> MapChain<K,V>{
|
||||
ConsMapChain(ref map,_) => map
|
||||
};
|
||||
// strip one layer of indirection off the pointer.
|
||||
map.find(key).map(|r| {**r})
|
||||
map.find(key).map_move(|r| {*r})
|
||||
}
|
||||
|
||||
// insert the binding into the top-level map
|
||||
|
@ -591,7 +591,7 @@ impl AstBuilder for @ExtCtxt {
|
||||
|
||||
fn expr_if(&self, span: span,
|
||||
cond: @ast::expr, then: @ast::expr, els: Option<@ast::expr>) -> @ast::expr {
|
||||
let els = els.map(|x| self.expr_block(self.block_expr(*x)));
|
||||
let els = els.map_move(|x| self.expr_block(self.block_expr(x)));
|
||||
self.expr(span, ast::expr_if(cond, self.block_expr(then), els))
|
||||
}
|
||||
|
||||
|
@ -417,7 +417,7 @@ fn noop_fold_stmt(s: &stmt_, fld: @ast_fold) -> Option<stmt_> {
|
||||
fn noop_fold_arm(a: &arm, fld: @ast_fold) -> arm {
|
||||
arm {
|
||||
pats: a.pats.map(|x| fld.fold_pat(*x)),
|
||||
guard: a.guard.map(|x| fld.fold_expr(*x)),
|
||||
guard: a.guard.map_move(|x| fld.fold_expr(x)),
|
||||
body: fld.fold_block(&a.body),
|
||||
}
|
||||
}
|
||||
@ -429,7 +429,7 @@ pub fn noop_fold_pat(p: &pat_, fld: @ast_fold) -> pat_ {
|
||||
pat_ident(
|
||||
binding_mode,
|
||||
fld.fold_path(pth),
|
||||
sub.map(|x| fld.fold_pat(*x))
|
||||
sub.map_move(|x| fld.fold_pat(x))
|
||||
)
|
||||
}
|
||||
pat_lit(e) => pat_lit(fld.fold_expr(e)),
|
||||
@ -459,7 +459,7 @@ pub fn noop_fold_pat(p: &pat_, fld: @ast_fold) -> pat_ {
|
||||
pat_vec(ref before, ref slice, ref after) => {
|
||||
pat_vec(
|
||||
before.map(|x| fld.fold_pat(*x)),
|
||||
slice.map(|x| fld.fold_pat(*x)),
|
||||
slice.map_move(|x| fld.fold_pat(x)),
|
||||
after.map(|x| fld.fold_pat(*x))
|
||||
)
|
||||
}
|
||||
@ -551,7 +551,7 @@ pub fn noop_fold_expr(e: &expr_, fld: @ast_fold) -> expr_ {
|
||||
expr_if(
|
||||
fld.fold_expr(cond),
|
||||
fld.fold_block(tr),
|
||||
fl.map(|x| fld.fold_expr(*x))
|
||||
fl.map_move(|x| fld.fold_expr(x))
|
||||
)
|
||||
}
|
||||
expr_while(cond, ref body) => {
|
||||
@ -565,7 +565,7 @@ pub fn noop_fold_expr(e: &expr_, fld: @ast_fold) -> expr_ {
|
||||
expr_loop(ref body, opt_ident) => {
|
||||
expr_loop(
|
||||
fld.fold_block(body),
|
||||
opt_ident.map(|x| fld.fold_ident(*x))
|
||||
opt_ident.map_move(|x| fld.fold_ident(x))
|
||||
)
|
||||
}
|
||||
expr_match(expr, ref arms) => {
|
||||
@ -608,13 +608,13 @@ pub fn noop_fold_expr(e: &expr_, fld: @ast_fold) -> expr_ {
|
||||
expr_path(ref pth) => expr_path(fld.fold_path(pth)),
|
||||
expr_self => expr_self,
|
||||
expr_break(ref opt_ident) => {
|
||||
expr_break(opt_ident.map(|x| fld.fold_ident(*x)))
|
||||
expr_break(opt_ident.map_move(|x| fld.fold_ident(x)))
|
||||
}
|
||||
expr_again(ref opt_ident) => {
|
||||
expr_again(opt_ident.map(|x| fld.fold_ident(*x)))
|
||||
expr_again(opt_ident.map_move(|x| fld.fold_ident(x)))
|
||||
}
|
||||
expr_ret(ref e) => {
|
||||
expr_ret(e.map(|x| fld.fold_expr(*x)))
|
||||
expr_ret(e.map_move(|x| fld.fold_expr(x)))
|
||||
}
|
||||
expr_log(lv, e) => {
|
||||
expr_log(
|
||||
@ -634,7 +634,7 @@ pub fn noop_fold_expr(e: &expr_, fld: @ast_fold) -> expr_ {
|
||||
expr_struct(
|
||||
fld.fold_path(path),
|
||||
fields.map(|x| fold_field(*x)),
|
||||
maybe_expr.map(|x| fld.fold_expr(*x))
|
||||
maybe_expr.map_move(|x| fld.fold_expr(x))
|
||||
)
|
||||
},
|
||||
expr_paren(ex) => expr_paren(fld.fold_expr(ex))
|
||||
@ -731,7 +731,7 @@ fn noop_fold_variant(v: &variant_, fld: @ast_fold) -> variant_ {
|
||||
fold_variant_arg(/*bad*/ (*x).clone())
|
||||
})
|
||||
}
|
||||
struct_variant_kind(struct_def) => {
|
||||
struct_variant_kind(ref struct_def) => {
|
||||
kind = struct_variant_kind(@ast::struct_def {
|
||||
fields: struct_def.fields.iter()
|
||||
.transform(|f| fld.fold_struct_field(*f)).collect(),
|
||||
@ -776,7 +776,7 @@ fn noop_fold_local(l: @Local, fld: @ast_fold) -> @Local {
|
||||
is_mutbl: l.is_mutbl,
|
||||
ty: fld.fold_ty(&l.ty),
|
||||
pat: fld.fold_pat(l.pat),
|
||||
init: l.init.map(|e| fld.fold_expr(*e)),
|
||||
init: l.init.map_move(|e| fld.fold_expr(e)),
|
||||
id: fld.new_id(l.id),
|
||||
span: fld.new_span(l.span),
|
||||
}
|
||||
|
@ -1313,7 +1313,7 @@ impl Parser {
|
||||
|
||||
// If the path might have bounds on it, they should be parsed before
|
||||
// the parameters, e.g. module::TraitName:B1+B2<T>
|
||||
before_tps.map_consume(|callback| callback());
|
||||
before_tps.map_move(|callback| callback());
|
||||
|
||||
// Parse the (obsolete) trailing region parameter, if any, which will
|
||||
// be written "foo/&x"
|
||||
|
@@ -486,7 +486,7 @@ fn mk_fresh_ident_interner() -> @ident_interner {
 pub fn get_ident_interner() -> @ident_interner {
     static key: local_data::Key<@@::parse::token::ident_interner> =
         &local_data::Key;
-    match local_data::get(key, |k| k.map(|&k| *k)) {
+    match local_data::get(key, |k| k.map_move(|k| *k)) {
         Some(interner) => *interner,
         None => {
             let interner = mk_fresh_ident_interner();